In [ ]:
!pip install numpy pandas scikit-learn tensorflow networkx
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (1.26.4) Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.1.4) Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.3.2) Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.17.0) Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (3.3) Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: scipy>=1.5.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.13.1) Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2) Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0) Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0) Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3) Requirement already satisfied: flatbuffers>=24.3.25 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25) Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.6.0) Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: h5py>=3.10.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.11.0) Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1) Requirement already satisfied: ml-dtypes<0.5.0,>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.4.0) Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0) Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.1) Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3) Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.31.0) Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (71.0.4) Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0) Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0) Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.12.2) Requirement already satisfied: wrapt>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0) Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.64.1) Requirement already satisfied: tensorboard<2.18,>=2.17 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.17.0) Requirement already satisfied: keras>=3.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.4.1) Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.37.1) Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0) Requirement already satisfied: rich in /usr/local/lib/python3.10/dist-packages (from keras>=3.2.0->tensorflow) (13.7.1) Requirement already satisfied: namex in /usr/local/lib/python3.10/dist-packages (from keras>=3.2.0->tensorflow) (0.0.8) Requirement already satisfied: optree in /usr/local/lib/python3.10/dist-packages (from keras>=3.2.0->tensorflow) (0.12.1) Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorflow) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorflow) (3.7) Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorflow) (2.0.7) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorflow) (2024.7.4) Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.18,>=2.17->tensorflow) (3.6) Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.18,>=2.17->tensorflow) (0.7.2) Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.18,>=2.17->tensorflow) (3.0.3) Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.18,>=2.17->tensorflow) (2.1.5) Requirement already satisfied: markdown-it-py>=2.2.0 in /usr/local/lib/python3.10/dist-packages (from rich->keras>=3.2.0->tensorflow) (3.0.0) Requirement already satisfied: pygments<3.0.0,>=2.13.0 in /usr/local/lib/python3.10/dist-packages (from rich->keras>=3.2.0->tensorflow) (2.16.1) Requirement already satisfied: mdurl~=0.1 in /usr/local/lib/python3.10/dist-packages (from markdown-it-py>=2.2.0->rich->keras>=3.2.0->tensorflow) (0.1.2)
In [ ]:
import numpy as np
import pandas as pd
import networkx as nx
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Dropout
from tensorflow.keras.models import Model
In [ ]:
from google.colab import files
uploaded = files.upload()
Saving fake.csv to fake.csv
In [ ]:
import os
# List files and directories in the current directory
files = os.listdir('.')
print(files)
['.config', 'fake.csv', 'drive', 'sample_data']
In [ ]:
# Print the current working directory
current_dir = os.getcwd()
print(current_dir)
/content
In [ ]:
!pip install pandas
Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.0.3) Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4) Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: numpy>=1.21.0 in /usr/local/lib/python3.10/dist-packages (from pandas) (1.25.2) Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.2->pandas) (1.16.0)
In [ ]:
import pandas as pd
# Path to your CSV file
file_path = '/content/fake.csv'
try:
# Attempt to read the CSV file with different options
df = pd.read_csv(
file_path,
delimiter=',', # Adjust the delimiter if needed
quotechar='"', # Adjust the quote character if needed
quoting=3, # Adjust quoting if needed (3 corresponds to csv.QUOTE_NONE)
on_bad_lines='skip' # Skip bad lines
)
# Display the first few rows of the dataframe
print(df.head())
except pd.errors.ParserError:
print("Error parsing the CSV file. There might be formatting issues in the file.")
except FileNotFoundError:
print("The file was not found. Please check the file path.")
except Exception as e:
print(f"An unexpected error occurred: {e}")
title \
Donald Trump Sends Out Embarrassing New Year’s... "Donald Trump just couldn t wish all Americans ... he had to give a shout out to his enemies haters and the very dishonest fake news media... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year President Angry Pants tweeted. 2018 will be ... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year. 2018 will be a g... 2017Trump s tweet went down about as welll as ... petty infantile gibberish? Only Trump! His lack of d... 2017no one likes you Calvin (@calvinstowell) ... 2017Your impeachment would make 2018 a great y... but I ll also accept regaining control of Cong... 2017Do you hear yourself talk? When you have t... 2017Who uses the word Haters in a New Years wi... 2017You can t just say happy new year? Koren ... 2017Here s Trump s New Year s Eve tweet from 2... including to my many enemies and those who hav... 2016This is nothing new for Trump. He s been d... Easter Thanksgiving and the anniversary of 9/11. pic.twitter.com/4... 2017Trump s holiday tweets are clearly not pre... 2017He s always been like this . . . the only ... his filter has been breaking down. Roy Schulz... 2017Who apart from a teenager uses the term haters? W... 2017he s a fucking 5 year old Who Knows (@rai... 2017So to all the people who voted for this a hole th... you were wrong! 70-year-old men don t change ...
Drunk Bragging Trump Staffer Started Russian C... "House Intelligence Committee Chairman Devin Nu... like many of us that the Christopher Steele-dossier was what p... the dossier is not what started the investigation according to documents obtained by the New Yor... Papadopoulos wasn t just a covfefe boy for Trump as his administration has alleged. He had a mu... but none so damning as being a drunken fool in... but team Trump ran with him being merely a cof... Papadopoulos revealed to Australian diplomat A... Alexander Downer is unclear the report states. But two months later when leaked Democratic emails began appearing ... Australian officials passed the information ab... according to four current and former American ... News "December 31 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Sheriff David Clarke Becomes An Internet Joke ... "On Friday it was revealed that former Milwaukee Sheriff ... who was being considered for Homeland Security... has an email scandal of his own.In January there was a brief run-in on a plane between Cl... who he later had detained by the police for no... except that maybe his feelings were hurt. Clar... and now a search warrant has been executed by the FBI ... the former sheriff tweeted. I will continue ... Jr. (@SheriffClarke) December 30 2017He didn t stop there.BREAKING NEWS! When L... the ANTIDOTE is go right at them. Punch them i... Jr. (@SheriffClarke) December 30 2017The internet called him out.This is your l... and just because the chose not to file charges... email search warrant filed https://t.co/zcbyc4... 2017I just hope the rest of the Village People... 2017Slaw baked potatoes or French fries? pic.twitter.com/fWfXsZupxy A... 2017pic.twitter.com/ymsOBLjfxU Pendulum Swing... 2017you called your police friends to stand up... 2017Is it me with this masterful pshop of your hat which I seem to never tire of. I think it s th... 2017Are you indicating with your fingers how m... dipshit Ike Barinholtz (@ikebarinholtz) Decem... 2017ROFL. Internet tough guy with fake flair. ... 2017You re so edgy buddy. Mrs. SMH (@MRSSMH2) December 30 2017Is his break over at Applebees? Aaron (@f... 2017Are you trying to earn your still relevan... 2017make sure to hydrate drink lots of water. It s rumored that prisone... 2017Terrill Thomas the 38-year-old black man who died of thirst i... was a victim of homicide. We just thought we s... News "December 30
Trump Is So Obsessed He Even Has Obama’s Name ... "On Christmas day Donald Trump announced that he would be back ... but he is golfing for the fourth day in a row.... he ll pass Obama s first-term total by July 24... 2017 That makes what a Washington Post reporte... but everything about this administration is bi... we are working to fix the problem and not on... the coding wasn t done correctly.The website o... who has spent several days in a row at the gol... is coded to serve up the following message in ... 2017That snippet of code appears to be on all ... which the footer says is paid for by the RNC? ... 2017It s also all over https://t.co/ayBlGmk65Z... this is weird code and it s not clear it would... but who knows. Christopher Ingraham (@_cingra... 2017After the coding was called out the reference to Obama was deleted.UPDATE: The... since the actual 404 (and presumably 500) page... 2017That suggests someone at either RNC or the... 2017 The code was f-cked up.The best part abou... 2017trump s coders can t code. Nobody is surpr... 2017Donald Trump is obsessed with Obama that h... News "December 29 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Pope Francis Just Called Out Donald Trump Duri... "Pope Francis used his annual Christmas Day mes... Francis said. On this festive day let us ask the Lord for peace for Jerusalem an... and that is an issue Trump continues to fight ... as the winds of war are blowing in our world a... societal and environmental decline Christmas invites us to focus on the sign of t... especially those for whom like Jesus there is no place in the inn he said. Jesus knows well the pain of not bei... he added. May our hearts not be closed as th... their home and their land Francis said. This was no comfortable or eas... they were full of hope and expectation because... Francis said Sunday. We see the tracks of ent... but driven from their land leave behind their dear ones. Amen to that.Pho... News "December 25 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
text \
Donald Trump Sends Out Embarrassing New Year’s... "Donald Trump just couldn t wish all Americans ... he had to give a shout out to his enemies haters and the very dishonest fake news media... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year President Angry Pants tweeted. 2018 will be ... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year. 2018 will be a g... 2017Trump s tweet went down about as welll as ... petty infantile gibberish? Only Trump! His lack of d... 2017no one likes you Calvin (@calvinstowell) ... 2017Your impeachment would make 2018 a great y... but I ll also accept regaining control of Cong... 2017Do you hear yourself talk? When you have t... 2017Who uses the word Haters in a New Years wi... 2017You can t just say happy new year? Koren ... 2017Here s Trump s New Year s Eve tweet from 2... including to my many enemies and those who hav... 2016This is nothing new for Trump. He s been d... Easter Thanksgiving and the anniversary of 9/11. pic.twitter.com/4... 2017Trump s holiday tweets are clearly not pre... 2017He s always been like this . . . the only ... his filter has been breaking down. Roy Schulz... 2017Who apart from a teenager uses the term haters? W... 2017he s a fucking 5 year old Who Knows (@rai... 2017So to all the people who voted for this a hole th... News
Drunk Bragging Trump Staffer Started Russian C... "House Intelligence Committee Chairman Devin Nu... like many of us that the Christopher Steele-dossier was what p... the dossier is not what started the investigation according to documents obtained by the New Yor... Papadopoulos wasn t just a covfefe boy for Trump as his administration has alleged. He had a mu... but none so damning as being a drunken fool in... but team Trump ran with him being merely a cof... Papadopoulos revealed to Australian diplomat A... Alexander Downer is unclear the report states. But two months later when leaked Democratic emails began appearing ... Australian officials passed the information ab... according to four current and former American ... News "December 31 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Sheriff David Clarke Becomes An Internet Joke ... "On Friday it was revealed that former Milwaukee Sheriff ... who was being considered for Homeland Security... has an email scandal of his own.In January there was a brief run-in on a plane between Cl... who he later had detained by the police for no... except that maybe his feelings were hurt. Clar... and now a search warrant has been executed by the FBI ... the former sheriff tweeted. I will continue ... Jr. (@SheriffClarke) December 30 2017He didn t stop there.BREAKING NEWS! When L... the ANTIDOTE is go right at them. Punch them i... Jr. (@SheriffClarke) December 30 2017The internet called him out.This is your l... and just because the chose not to file charges... email search warrant filed https://t.co/zcbyc4... 2017I just hope the rest of the Village People... 2017Slaw baked potatoes or French fries? pic.twitter.com/fWfXsZupxy A... 2017pic.twitter.com/ymsOBLjfxU Pendulum Swing... 2017you called your police friends to stand up... 2017Is it me with this masterful pshop of your hat which I seem to never tire of. I think it s th... 2017Are you indicating with your fingers how m... dipshit Ike Barinholtz (@ikebarinholtz) Decem... 2017ROFL. Internet tough guy with fake flair. ... 2017You re so edgy buddy. Mrs. SMH (@MRSSMH2) December 30 2017Is his break over at Applebees? Aaron (@f... 2017Are you trying to earn your still relevan... 2017make sure to hydrate drink lots of water. It s rumored that prisone... 2017Terrill Thomas the 38-year-old black man who died of thirst i... was a victim of homicide. We just thought we s... News 2017"
Trump Is So Obsessed He Even Has Obama’s Name ... "On Christmas day Donald Trump announced that he would be back ... but he is golfing for the fourth day in a row.... he ll pass Obama s first-term total by July 24... 2017 That makes what a Washington Post reporte... but everything about this administration is bi... we are working to fix the problem and not on... the coding wasn t done correctly.The website o... who has spent several days in a row at the gol... is coded to serve up the following message in ... 2017That snippet of code appears to be on all ... which the footer says is paid for by the RNC? ... 2017It s also all over https://t.co/ayBlGmk65Z... this is weird code and it s not clear it would... but who knows. Christopher Ingraham (@_cingra... 2017After the coding was called out the reference to Obama was deleted.UPDATE: The... since the actual 404 (and presumably 500) page... 2017That suggests someone at either RNC or the... 2017 The code was f-cked up.The best part abou... 2017trump s coders can t code. Nobody is surpr... 2017Donald Trump is obsessed with Obama that h... News "December 29 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Pope Francis Just Called Out Donald Trump Duri... "Pope Francis used his annual Christmas Day mes... Francis said. On this festive day let us ask the Lord for peace for Jerusalem an... and that is an issue Trump continues to fight ... as the winds of war are blowing in our world a... societal and environmental decline Christmas invites us to focus on the sign of t... especially those for whom like Jesus there is no place in the inn he said. Jesus knows well the pain of not bei... he added. May our hearts not be closed as th... their home and their land Francis said. This was no comfortable or eas... they were full of hope and expectation because... Francis said Sunday. We see the tracks of ent... but driven from their land leave behind their dear ones. Amen to that.Pho... News "December 25 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
subject \
Donald Trump Sends Out Embarrassing New Year’s... "Donald Trump just couldn t wish all Americans ... he had to give a shout out to his enemies haters and the very dishonest fake news media... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year President Angry Pants tweeted. 2018 will be ... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year. 2018 will be a g... 2017Trump s tweet went down about as welll as ... petty infantile gibberish? Only Trump! His lack of d... 2017no one likes you Calvin (@calvinstowell) ... 2017Your impeachment would make 2018 a great y... but I ll also accept regaining control of Cong... 2017Do you hear yourself talk? When you have t... 2017Who uses the word Haters in a New Years wi... 2017You can t just say happy new year? Koren ... 2017Here s Trump s New Year s Eve tweet from 2... including to my many enemies and those who hav... 2016This is nothing new for Trump. He s been d... Easter Thanksgiving and the anniversary of 9/11. pic.twitter.com/4... 2017Trump s holiday tweets are clearly not pre... 2017He s always been like this . . . the only ... his filter has been breaking down. Roy Schulz... 2017Who apart from a teenager uses the term haters? W... 2017he s a fucking 5 year old Who Knows (@rai... 2017So to all the people who voted for this a hole th... "December 31
Drunk Bragging Trump Staffer Started Russian C... "House Intelligence Committee Chairman Devin Nu... like many of us that the Christopher Steele-dossier was what p... the dossier is not what started the investigation according to documents obtained by the New Yor... Papadopoulos wasn t just a covfefe boy for Trump as his administration has alleged. He had a mu... but none so damning as being a drunken fool in... but team Trump ran with him being merely a cof... Papadopoulos revealed to Australian diplomat A... Alexander Downer is unclear the report states. But two months later when leaked Democratic emails began appearing ... Australian officials passed the information ab... according to four current and former American ... News "December 31 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Sheriff David Clarke Becomes An Internet Joke ... "On Friday it was revealed that former Milwaukee Sheriff ... who was being considered for Homeland Security... has an email scandal of his own.In January there was a brief run-in on a plane between Cl... who he later had detained by the police for no... except that maybe his feelings were hurt. Clar... and now a search warrant has been executed by the FBI ... the former sheriff tweeted. I will continue ... Jr. (@SheriffClarke) December 30 2017He didn t stop there.BREAKING NEWS! When L... the ANTIDOTE is go right at them. Punch them i... Jr. (@SheriffClarke) December 30 2017The internet called him out.This is your l... and just because the chose not to file charges... email search warrant filed https://t.co/zcbyc4... 2017I just hope the rest of the Village People... 2017Slaw baked potatoes or French fries? pic.twitter.com/fWfXsZupxy A... 2017pic.twitter.com/ymsOBLjfxU Pendulum Swing... 2017you called your police friends to stand up... 2017Is it me with this masterful pshop of your hat which I seem to never tire of. I think it s th... 2017Are you indicating with your fingers how m... dipshit Ike Barinholtz (@ikebarinholtz) Decem... 2017ROFL. Internet tough guy with fake flair. ... 2017You re so edgy buddy. Mrs. SMH (@MRSSMH2) December 30 2017Is his break over at Applebees? Aaron (@f... 2017Are you trying to earn your still relevan... 2017make sure to hydrate drink lots of water. It s rumored that prisone... 2017Terrill Thomas the 38-year-old black man who died of thirst i... was a victim of homicide. We just thought we s... News NaN
Trump Is So Obsessed He Even Has Obama’s Name ... "On Christmas day Donald Trump announced that he would be back ... but he is golfing for the fourth day in a row.... he ll pass Obama s first-term total by July 24... 2017 That makes what a Washington Post reporte... but everything about this administration is bi... we are working to fix the problem and not on... the coding wasn t done correctly.The website o... who has spent several days in a row at the gol... is coded to serve up the following message in ... 2017That snippet of code appears to be on all ... which the footer says is paid for by the RNC? ... 2017It s also all over https://t.co/ayBlGmk65Z... this is weird code and it s not clear it would... but who knows. Christopher Ingraham (@_cingra... 2017After the coding was called out the reference to Obama was deleted.UPDATE: The... since the actual 404 (and presumably 500) page... 2017That suggests someone at either RNC or the... 2017 The code was f-cked up.The best part abou... 2017trump s coders can t code. Nobody is surpr... 2017Donald Trump is obsessed with Obama that h... News "December 29 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Pope Francis Just Called Out Donald Trump Duri... "Pope Francis used his annual Christmas Day mes... Francis said. On this festive day let us ask the Lord for peace for Jerusalem an... and that is an issue Trump continues to fight ... as the winds of war are blowing in our world a... societal and environmental decline Christmas invites us to focus on the sign of t... especially those for whom like Jesus there is no place in the inn he said. Jesus knows well the pain of not bei... he added. May our hearts not be closed as th... their home and their land Francis said. This was no comfortable or eas... they were full of hope and expectation because... Francis said Sunday. We see the tracks of ent... but driven from their land leave behind their dear ones. Amen to that.Pho... News "December 25 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
date
Donald Trump Sends Out Embarrassing New Year’s... "Donald Trump just couldn t wish all Americans ... he had to give a shout out to his enemies haters and the very dishonest fake news media... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year President Angry Pants tweeted. 2018 will be ... I want to wish all of my friends supporters enemies haters and even the very dishonest Fake News Media a Happy and Healthy New Year. 2018 will be a g... 2017Trump s tweet went down about as welll as ... petty infantile gibberish? Only Trump! His lack of d... 2017no one likes you Calvin (@calvinstowell) ... 2017Your impeachment would make 2018 a great y... but I ll also accept regaining control of Cong... 2017Do you hear yourself talk? When you have t... 2017Who uses the word Haters in a New Years wi... 2017You can t just say happy new year? Koren ... 2017Here s Trump s New Year s Eve tweet from 2... including to my many enemies and those who hav... 2016This is nothing new for Trump. He s been d... Easter Thanksgiving and the anniversary of 9/11. pic.twitter.com/4... 2017Trump s holiday tweets are clearly not pre... 2017He s always been like this . . . the only ... his filter has been breaking down. Roy Schulz... 2017Who apart from a teenager uses the term haters? W... 2017he s a fucking 5 year old Who Knows (@rai... 2017So to all the people who voted for this a hole th... 2017"
Drunk Bragging Trump Staffer Started Russian C... "House Intelligence Committee Chairman Devin Nu... like many of us that the Christopher Steele-dossier was what p... the dossier is not what started the investigation according to documents obtained by the New Yor... Papadopoulos wasn t just a covfefe boy for Trump as his administration has alleged. He had a mu... but none so damning as being a drunken fool in... but team Trump ran with him being merely a cof... Papadopoulos revealed to Australian diplomat A... Alexander Downer is unclear the report states. But two months later when leaked Democratic emails began appearing ... Australian officials passed the information ab... according to four current and former American ... News "December 31 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Sheriff David Clarke Becomes An Internet Joke ... "On Friday it was revealed that former Milwaukee Sheriff ... who was being considered for Homeland Security... has an email scandal of his own.In January there was a brief run-in on a plane between Cl... who he later had detained by the police for no... except that maybe his feelings were hurt. Clar... and now a search warrant has been executed by the FBI ... the former sheriff tweeted. I will continue ... Jr. (@SheriffClarke) December 30 2017He didn t stop there.BREAKING NEWS! When L... the ANTIDOTE is go right at them. Punch them i... Jr. (@SheriffClarke) December 30 2017The internet called him out.This is your l... and just because the chose not to file charges... email search warrant filed https://t.co/zcbyc4... 2017I just hope the rest of the Village People... 2017Slaw baked potatoes or French fries? pic.twitter.com/fWfXsZupxy A... 2017pic.twitter.com/ymsOBLjfxU Pendulum Swing... 2017you called your police friends to stand up... 2017Is it me with this masterful pshop of your hat which I seem to never tire of. I think it s th... 2017Are you indicating with your fingers how m... dipshit Ike Barinholtz (@ikebarinholtz) Decem... 2017ROFL. Internet tough guy with fake flair. ... 2017You re so edgy buddy. Mrs. SMH (@MRSSMH2) December 30 2017Is his break over at Applebees? Aaron (@f... 2017Are you trying to earn your still relevan... 2017make sure to hydrate drink lots of water. It s rumored that prisone... 2017Terrill Thomas the 38-year-old black man who died of thirst i... was a victim of homicide. We just thought we s... News NaN
Trump Is So Obsessed He Even Has Obama’s Name ... "On Christmas day Donald Trump announced that he would be back ... but he is golfing for the fourth day in a row.... he ll pass Obama s first-term total by July 24... 2017 That makes what a Washington Post reporte... but everything about this administration is bi... we are working to fix the problem and not on... the coding wasn t done correctly.The website o... who has spent several days in a row at the gol... is coded to serve up the following message in ... 2017That snippet of code appears to be on all ... which the footer says is paid for by the RNC? ... 2017It s also all over https://t.co/ayBlGmk65Z... this is weird code and it s not clear it would... but who knows. Christopher Ingraham (@_cingra... 2017After the coding was called out the reference to Obama was deleted.UPDATE: The... since the actual 404 (and presumably 500) page... 2017That suggests someone at either RNC or the... 2017 The code was f-cked up.The best part abou... 2017trump s coders can t code. Nobody is surpr... 2017Donald Trump is obsessed with Obama that h... News "December 29 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
Pope Francis Just Called Out Donald Trump Duri... "Pope Francis used his annual Christmas Day mes... Francis said. On this festive day let us ask the Lord for peace for Jerusalem an... and that is an issue Trump continues to fight ... as the winds of war are blowing in our world a... societal and environmental decline Christmas invites us to focus on the sign of t... especially those for whom like Jesus there is no place in the inn he said. Jesus knows well the pain of not bei... he added. May our hearts not be closed as th... their home and their land Francis said. This was no comfortable or eas... they were full of hope and expectation because... Francis said Sunday. We see the tracks of ent... but driven from their land leave behind their dear ones. Amen to that.Pho... News "December 25 2017" NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
In [ ]:
import pandas as pd
# Load the dataset
file_path = '/content/fake.csv'
df = pd.read_csv(file_path)
In [ ]:
import re
import nltk
from nltk.corpus import stopwords
# Download stopwords
nltk.download('stopwords')
STOPWORDS = set(stopwords.words('english'))
# Define the cleaning function
def clean_text(text):
text = re.sub(r'http\S+', '', text) # Remove URLs
text = re.sub(r'[^a-zA-Z\s]', '', text) # Remove non-alphabetic characters
text = text.lower().split() # Convert to lowercase and split into words
text = [word for word in text if word not in STOPWORDS] # Remove stopwords
return ' '.join(text)
# Apply the cleaning function to the text column
df['cleaned_text'] = df['text'].apply(clean_text) # Ensure 'text' column exists
[nltk_data] Downloading package stopwords to /root/nltk_data... [nltk_data] Package stopwords is already up-to-date!
In [ ]:
# Encode the labels (assuming 'subject' is the label you want to encode)
df['label'] = df['label'].map({'real': 0, 'fake': 1}) # Adjust based on your label mapping
# Print encoded labels
print("Encoded Labels:\n", df['label'].head())
Encoded Labels: 0 NaN 1 NaN 2 NaN 3 NaN 4 NaN Name: label, dtype: float64
In [ ]:
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
# Tokenize the text
tokenizer = Tokenizer()
tokenizer.fit_on_texts(df['cleaned_text'])
sequences = tokenizer.texts_to_sequences(df['cleaned_text'])
X_text = pad_sequences(sequences, maxlen=50) # Adjust maxlen as needed
# Print encoded text sequences
print("Encoded Text Sequences:\n", X_text[:5])
Encoded Text Sequences:
[[ 102 13440 882 4 3338 97472 97473 345 388 10 1481 51
47 12532 1472 1902 97474 97475 345 2287 5971 2027 844 6105
8823 97476 345 4022 63 404 573 56842 345 3 645 3778
1118 5 192 112 242 435 451 227 192 63 97477 1264
17811 139]
[ 33 45 296 283 1693 113 358 410 3326 938 5649 171
904 148 184 6814 29 7243 72 367 483 59 29 223
171 1209 1559 24206 555 6814 4476 1265 817 86 7153 2003
343 1217 800 1146 38611 587 2786 7462 517 805 97480 280
4093 139]
[97503 4559 97504 345 182 4322 84 3105 9169 97505 97506 345
34 247 46077 4193 3036 537 9548 3750 1229 537 912 171
800 97507 97508 345 56846 1803 451 54 71 937 17812 5709
4380 421 1286 3162 407 913 5443 359 163 2343 97509 2643
5800 139]
[ 1168 669 7 2502 8420 31370 345 2230 11782 16010 264 111
293 6491 6964 386 921 2230 74 22 364 151 1932 97520
388 97521 345 1 97522 2230 1221 2046 2263 7199 97523 345
11 1 4323 9 328 15 24209 712 1218 1521 46078 605
5189 139]
[ 515 1040 333 538 10382 515 1097 203 2307 5424 38616 4818
1822 603 225 331 31 10649 3164 10649 2386 4577 3307 2
449 57 6107 300 624 719 371 75 57 6107 456 3737
1462 69 196 3127 1086 603 331 3550 1225 11594 56847 2502
20383 139]]
In [ ]:
import pandas as pd
# Load the dataset
file_path = '/content/fake.csv'
df = pd.read_csv(file_path)
# Convert 'date' to Unix timestamp for numeric representation
df['date'] = pd.to_datetime(df['date'], format='%B %d, %Y', errors='coerce')
df['date'] = df['date'].astype('int64') / 10**9 # Convert date to Unix timestamp
# Handle setting with copy warning by creating a copy of the DataFrame
df = df.copy()
# Print the converted date column to check
print("Converted Date Column:\n", df['date'].head())
# Add the DataFrame index as a column
df.reset_index(inplace=True)
# Extract user metadata (index and date)
X_user = df[['index', 'date']].values
# Print user metadata to check
print("User Metadata:\n", X_user[:5])
Converted Date Column: 0 1.514678e+09 1 1.514678e+09 2 1.514592e+09 3 1.514506e+09 4 1.514160e+09 Name: date, dtype: float64 User Metadata: [[0.0000000e+00 1.5146784e+09] [1.0000000e+00 1.5146784e+09] [2.0000000e+00 1.5145920e+09] [3.0000000e+00 1.5145056e+09] [4.0000000e+00 1.5141600e+09]]
In [ ]:
import networkx as nx
# Create the graph
G = nx.Graph()
# Add nodes with attributes
for i, row in df.iterrows():
G.add_node(i, text=row['text'], user_metadata=X_user[i], label=row['subject'])
# Add dummy edges for illustration purposes
for i in range(len(df)):
if i < len(df) - 1:
G.add_edge(i, i + 1) # Connect each node to the next one
# Print basic graph info
print(f"Number of nodes: {G.number_of_nodes()}")
print(f"Number of edges: {G.number_of_edges()}")
Number of nodes: 23481 Number of edges: 23480
In [ ]:
import networkx as nx
# Create the graph
G = nx.Graph()
# Add nodes with attributes
for i, row in df.iterrows():
G.add_node(i, text=row['text'], user_metadata=X_user[i], label=row['subject'])
# Add dummy edges for illustration purposes
for i in range(len(df)):
if i < len(df) - 1:
G.add_edge(i, i + 1) # Connect each node to the next one
# Print nodes with attributes
print("Nodes with attributes:")
for node in G.nodes(data=True):
print(node)
# Print edges with attributes
print("\nEdges with attributes:")
for edge in G.edges(data=True):
print(edge)
Streaming output truncated to the last 5000 lines.
(18480, 18481, {})
(18481, 18482, {})
(18482, 18483, {})
(18483, 18484, {})
(18484, 18485, {})
(18485, 18486, {})
(18486, 18487, {})
(18487, 18488, {})
(18488, 18489, {})
(18489, 18490, {})
(18490, 18491, {})
(18491, 18492, {})
(18492, 18493, {})
(18493, 18494, {})
(18494, 18495, {})
(18495, 18496, {})
(18496, 18497, {})
(18497, 18498, {})
(18498, 18499, {})
(18499, 18500, {})
(18500, 18501, {})
(18501, 18502, {})
(18502, 18503, {})
(18503, 18504, {})
(18504, 18505, {})
(18505, 18506, {})
(18506, 18507, {})
(18507, 18508, {})
(18508, 18509, {})
(18509, 18510, {})
(18510, 18511, {})
(18511, 18512, {})
(18512, 18513, {})
(18513, 18514, {})
(18514, 18515, {})
(18515, 18516, {})
(18516, 18517, {})
(18517, 18518, {})
(18518, 18519, {})
(18519, 18520, {})
(18520, 18521, {})
(18521, 18522, {})
(18522, 18523, {})
(18523, 18524, {})
(18524, 18525, {})
(18525, 18526, {})
(18526, 18527, {})
(18527, 18528, {})
(18528, 18529, {})
(18529, 18530, {})
(18530, 18531, {})
(18531, 18532, {})
(18532, 18533, {})
(18533, 18534, {})
(18534, 18535, {})
(18535, 18536, {})
(18536, 18537, {})
(18537, 18538, {})
(18538, 18539, {})
(18539, 18540, {})
(18540, 18541, {})
(18541, 18542, {})
(18542, 18543, {})
(18543, 18544, {})
(18544, 18545, {})
(18545, 18546, {})
(18546, 18547, {})
(18547, 18548, {})
(18548, 18549, {})
(18549, 18550, {})
(18550, 18551, {})
(18551, 18552, {})
(18552, 18553, {})
(18553, 18554, {})
(18554, 18555, {})
(18555, 18556, {})
(18556, 18557, {})
(18557, 18558, {})
(18558, 18559, {})
(18559, 18560, {})
(18560, 18561, {})
(18561, 18562, {})
(18562, 18563, {})
(18563, 18564, {})
(18564, 18565, {})
(18565, 18566, {})
(18566, 18567, {})
(18567, 18568, {})
(18568, 18569, {})
(18569, 18570, {})
(18570, 18571, {})
(18571, 18572, {})
(18572, 18573, {})
(18573, 18574, {})
(18574, 18575, {})
(18575, 18576, {})
(18576, 18577, {})
(18577, 18578, {})
(18578, 18579, {})
(18579, 18580, {})
(18580, 18581, {})
(18581, 18582, {})
(18582, 18583, {})
(18583, 18584, {})
(18584, 18585, {})
(18585, 18586, {})
(18586, 18587, {})
(18587, 18588, {})
(18588, 18589, {})
(18589, 18590, {})
(18590, 18591, {})
(18591, 18592, {})
(18592, 18593, {})
(18593, 18594, {})
(18594, 18595, {})
(18595, 18596, {})
(18596, 18597, {})
(18597, 18598, {})
(18598, 18599, {})
(18599, 18600, {})
(18600, 18601, {})
(18601, 18602, {})
(18602, 18603, {})
(18603, 18604, {})
(18604, 18605, {})
(18605, 18606, {})
(18606, 18607, {})
(18607, 18608, {})
(18608, 18609, {})
(18609, 18610, {})
(18610, 18611, {})
(18611, 18612, {})
(18612, 18613, {})
(18613, 18614, {})
(18614, 18615, {})
(18615, 18616, {})
(18616, 18617, {})
(18617, 18618, {})
(18618, 18619, {})
(18619, 18620, {})
(18620, 18621, {})
(18621, 18622, {})
(18622, 18623, {})
(18623, 18624, {})
(18624, 18625, {})
(18625, 18626, {})
(18626, 18627, {})
(18627, 18628, {})
(18628, 18629, {})
(18629, 18630, {})
(18630, 18631, {})
(18631, 18632, {})
(18632, 18633, {})
(18633, 18634, {})
(18634, 18635, {})
(18635, 18636, {})
(18636, 18637, {})
(18637, 18638, {})
(18638, 18639, {})
(18639, 18640, {})
(18640, 18641, {})
(18641, 18642, {})
(18642, 18643, {})
(18643, 18644, {})
(18644, 18645, {})
(18645, 18646, {})
(18646, 18647, {})
(18647, 18648, {})
(18648, 18649, {})
(18649, 18650, {})
(18650, 18651, {})
(18651, 18652, {})
(18652, 18653, {})
(18653, 18654, {})
(18654, 18655, {})
(18655, 18656, {})
(18656, 18657, {})
(18657, 18658, {})
(18658, 18659, {})
(18659, 18660, {})
(18660, 18661, {})
(18661, 18662, {})
(18662, 18663, {})
(18663, 18664, {})
(18664, 18665, {})
(18665, 18666, {})
(18666, 18667, {})
(18667, 18668, {})
(18668, 18669, {})
(18669, 18670, {})
(18670, 18671, {})
(18671, 18672, {})
(18672, 18673, {})
(18673, 18674, {})
(18674, 18675, {})
(18675, 18676, {})
(18676, 18677, {})
(18677, 18678, {})
(18678, 18679, {})
(18679, 18680, {})
(18680, 18681, {})
(18681, 18682, {})
(18682, 18683, {})
(18683, 18684, {})
(18684, 18685, {})
(18685, 18686, {})
(18686, 18687, {})
(18687, 18688, {})
(18688, 18689, {})
(18689, 18690, {})
(18690, 18691, {})
(18691, 18692, {})
(18692, 18693, {})
(18693, 18694, {})
(18694, 18695, {})
(18695, 18696, {})
(18696, 18697, {})
(18697, 18698, {})
(18698, 18699, {})
(18699, 18700, {})
(18700, 18701, {})
(18701, 18702, {})
(18702, 18703, {})
(18703, 18704, {})
(18704, 18705, {})
(18705, 18706, {})
(18706, 18707, {})
(18707, 18708, {})
(18708, 18709, {})
(18709, 18710, {})
(18710, 18711, {})
(18711, 18712, {})
(18712, 18713, {})
(18713, 18714, {})
(18714, 18715, {})
(18715, 18716, {})
(18716, 18717, {})
(18717, 18718, {})
(18718, 18719, {})
(18719, 18720, {})
(18720, 18721, {})
(18721, 18722, {})
(18722, 18723, {})
(18723, 18724, {})
(18724, 18725, {})
(18725, 18726, {})
(18726, 18727, {})
(18727, 18728, {})
(18728, 18729, {})
(18729, 18730, {})
(18730, 18731, {})
(18731, 18732, {})
(18732, 18733, {})
(18733, 18734, {})
(18734, 18735, {})
(18735, 18736, {})
(18736, 18737, {})
(18737, 18738, {})
(18738, 18739, {})
(18739, 18740, {})
(18740, 18741, {})
(18741, 18742, {})
(18742, 18743, {})
(18743, 18744, {})
(18744, 18745, {})
(18745, 18746, {})
(18746, 18747, {})
(18747, 18748, {})
(18748, 18749, {})
(18749, 18750, {})
(18750, 18751, {})
(18751, 18752, {})
(18752, 18753, {})
(18753, 18754, {})
(18754, 18755, {})
(18755, 18756, {})
(18756, 18757, {})
(18757, 18758, {})
(18758, 18759, {})
(18759, 18760, {})
(18760, 18761, {})
(18761, 18762, {})
(18762, 18763, {})
(18763, 18764, {})
(18764, 18765, {})
(18765, 18766, {})
(18766, 18767, {})
(18767, 18768, {})
(18768, 18769, {})
(18769, 18770, {})
(18770, 18771, {})
(18771, 18772, {})
(18772, 18773, {})
(18773, 18774, {})
(18774, 18775, {})
(18775, 18776, {})
(18776, 18777, {})
(18777, 18778, {})
(18778, 18779, {})
(18779, 18780, {})
(18780, 18781, {})
(18781, 18782, {})
(18782, 18783, {})
(18783, 18784, {})
(18784, 18785, {})
(18785, 18786, {})
(18786, 18787, {})
(18787, 18788, {})
(18788, 18789, {})
(18789, 18790, {})
(18790, 18791, {})
(18791, 18792, {})
(18792, 18793, {})
(18793, 18794, {})
(18794, 18795, {})
(18795, 18796, {})
(18796, 18797, {})
(18797, 18798, {})
(18798, 18799, {})
(18799, 18800, {})
(18800, 18801, {})
(18801, 18802, {})
(18802, 18803, {})
(18803, 18804, {})
(18804, 18805, {})
(18805, 18806, {})
(18806, 18807, {})
(18807, 18808, {})
(18808, 18809, {})
(18809, 18810, {})
(18810, 18811, {})
(18811, 18812, {})
(18812, 18813, {})
(18813, 18814, {})
(18814, 18815, {})
(18815, 18816, {})
(18816, 18817, {})
(18817, 18818, {})
(18818, 18819, {})
(18819, 18820, {})
(18820, 18821, {})
(18821, 18822, {})
(18822, 18823, {})
(18823, 18824, {})
(18824, 18825, {})
(18825, 18826, {})
(18826, 18827, {})
(18827, 18828, {})
(18828, 18829, {})
(18829, 18830, {})
(18830, 18831, {})
(18831, 18832, {})
(18832, 18833, {})
(18833, 18834, {})
(18834, 18835, {})
(18835, 18836, {})
(18836, 18837, {})
(18837, 18838, {})
(18838, 18839, {})
(18839, 18840, {})
(18840, 18841, {})
(18841, 18842, {})
(18842, 18843, {})
(18843, 18844, {})
(18844, 18845, {})
(18845, 18846, {})
(18846, 18847, {})
(18847, 18848, {})
(18848, 18849, {})
(18849, 18850, {})
(18850, 18851, {})
(18851, 18852, {})
(18852, 18853, {})
(18853, 18854, {})
(18854, 18855, {})
(18855, 18856, {})
(18856, 18857, {})
(18857, 18858, {})
(18858, 18859, {})
(18859, 18860, {})
(18860, 18861, {})
(18861, 18862, {})
(18862, 18863, {})
(18863, 18864, {})
(18864, 18865, {})
(18865, 18866, {})
(18866, 18867, {})
(18867, 18868, {})
(18868, 18869, {})
(18869, 18870, {})
(18870, 18871, {})
(18871, 18872, {})
(18872, 18873, {})
(18873, 18874, {})
(18874, 18875, {})
(18875, 18876, {})
(18876, 18877, {})
(18877, 18878, {})
(18878, 18879, {})
(18879, 18880, {})
(18880, 18881, {})
(18881, 18882, {})
(18882, 18883, {})
(18883, 18884, {})
(18884, 18885, {})
(18885, 18886, {})
(18886, 18887, {})
(18887, 18888, {})
(18888, 18889, {})
(18889, 18890, {})
(18890, 18891, {})
(18891, 18892, {})
(18892, 18893, {})
(18893, 18894, {})
(18894, 18895, {})
(18895, 18896, {})
(18896, 18897, {})
(18897, 18898, {})
(18898, 18899, {})
(18899, 18900, {})
(18900, 18901, {})
(18901, 18902, {})
(18902, 18903, {})
(18903, 18904, {})
(18904, 18905, {})
(18905, 18906, {})
(18906, 18907, {})
(18907, 18908, {})
(18908, 18909, {})
(18909, 18910, {})
(18910, 18911, {})
(18911, 18912, {})
(18912, 18913, {})
(18913, 18914, {})
(18914, 18915, {})
(18915, 18916, {})
(18916, 18917, {})
(18917, 18918, {})
(18918, 18919, {})
(18919, 18920, {})
(18920, 18921, {})
(18921, 18922, {})
(18922, 18923, {})
(18923, 18924, {})
(18924, 18925, {})
(18925, 18926, {})
(18926, 18927, {})
(18927, 18928, {})
(18928, 18929, {})
(18929, 18930, {})
(18930, 18931, {})
(18931, 18932, {})
(18932, 18933, {})
(18933, 18934, {})
(18934, 18935, {})
(18935, 18936, {})
(18936, 18937, {})
(18937, 18938, {})
(18938, 18939, {})
(18939, 18940, {})
(18940, 18941, {})
(18941, 18942, {})
(18942, 18943, {})
(18943, 18944, {})
(18944, 18945, {})
(18945, 18946, {})
(18946, 18947, {})
(18947, 18948, {})
(18948, 18949, {})
(18949, 18950, {})
(18950, 18951, {})
(18951, 18952, {})
(18952, 18953, {})
(18953, 18954, {})
(18954, 18955, {})
(18955, 18956, {})
(18956, 18957, {})
(18957, 18958, {})
(18958, 18959, {})
(18959, 18960, {})
(18960, 18961, {})
(18961, 18962, {})
(18962, 18963, {})
(18963, 18964, {})
(18964, 18965, {})
(18965, 18966, {})
(18966, 18967, {})
(18967, 18968, {})
(18968, 18969, {})
(18969, 18970, {})
(18970, 18971, {})
(18971, 18972, {})
(18972, 18973, {})
(18973, 18974, {})
(18974, 18975, {})
(18975, 18976, {})
(18976, 18977, {})
(18977, 18978, {})
(18978, 18979, {})
(18979, 18980, {})
(18980, 18981, {})
(18981, 18982, {})
(18982, 18983, {})
(18983, 18984, {})
(18984, 18985, {})
(18985, 18986, {})
(18986, 18987, {})
(18987, 18988, {})
(18988, 18989, {})
(18989, 18990, {})
(18990, 18991, {})
(18991, 18992, {})
(18992, 18993, {})
(18993, 18994, {})
(18994, 18995, {})
(18995, 18996, {})
(18996, 18997, {})
(18997, 18998, {})
(18998, 18999, {})
(18999, 19000, {})
(19000, 19001, {})
(19001, 19002, {})
(19002, 19003, {})
(19003, 19004, {})
(19004, 19005, {})
(19005, 19006, {})
(19006, 19007, {})
(19007, 19008, {})
(19008, 19009, {})
(19009, 19010, {})
(19010, 19011, {})
(19011, 19012, {})
(19012, 19013, {})
(19013, 19014, {})
(19014, 19015, {})
(19015, 19016, {})
(19016, 19017, {})
(19017, 19018, {})
(19018, 19019, {})
(19019, 19020, {})
(19020, 19021, {})
(19021, 19022, {})
(19022, 19023, {})
(19023, 19024, {})
(19024, 19025, {})
(19025, 19026, {})
(19026, 19027, {})
(19027, 19028, {})
(19028, 19029, {})
(19029, 19030, {})
(19030, 19031, {})
(19031, 19032, {})
(19032, 19033, {})
(19033, 19034, {})
(19034, 19035, {})
(19035, 19036, {})
(19036, 19037, {})
(19037, 19038, {})
(19038, 19039, {})
(19039, 19040, {})
(19040, 19041, {})
(19041, 19042, {})
(19042, 19043, {})
(19043, 19044, {})
(19044, 19045, {})
(19045, 19046, {})
(19046, 19047, {})
(19047, 19048, {})
(19048, 19049, {})
(19049, 19050, {})
(19050, 19051, {})
(19051, 19052, {})
(19052, 19053, {})
(19053, 19054, {})
(19054, 19055, {})
(19055, 19056, {})
(19056, 19057, {})
(19057, 19058, {})
(19058, 19059, {})
(19059, 19060, {})
(19060, 19061, {})
(19061, 19062, {})
(19062, 19063, {})
(19063, 19064, {})
(19064, 19065, {})
(19065, 19066, {})
(19066, 19067, {})
(19067, 19068, {})
(19068, 19069, {})
(19069, 19070, {})
(19070, 19071, {})
(19071, 19072, {})
(19072, 19073, {})
(19073, 19074, {})
(19074, 19075, {})
(19075, 19076, {})
(19076, 19077, {})
(19077, 19078, {})
(19078, 19079, {})
(19079, 19080, {})
(19080, 19081, {})
(19081, 19082, {})
(19082, 19083, {})
(19083, 19084, {})
(19084, 19085, {})
(19085, 19086, {})
(19086, 19087, {})
(19087, 19088, {})
(19088, 19089, {})
(19089, 19090, {})
(19090, 19091, {})
(19091, 19092, {})
(19092, 19093, {})
(19093, 19094, {})
(19094, 19095, {})
(19095, 19096, {})
(19096, 19097, {})
(19097, 19098, {})
(19098, 19099, {})
(19099, 19100, {})
(19100, 19101, {})
(19101, 19102, {})
(19102, 19103, {})
(19103, 19104, {})
(19104, 19105, {})
(19105, 19106, {})
(19106, 19107, {})
(19107, 19108, {})
(19108, 19109, {})
(19109, 19110, {})
(19110, 19111, {})
(19111, 19112, {})
(19112, 19113, {})
(19113, 19114, {})
(19114, 19115, {})
(19115, 19116, {})
(19116, 19117, {})
(19117, 19118, {})
(19118, 19119, {})
(19119, 19120, {})
(19120, 19121, {})
(19121, 19122, {})
(19122, 19123, {})
(19123, 19124, {})
(19124, 19125, {})
(19125, 19126, {})
(19126, 19127, {})
(19127, 19128, {})
(19128, 19129, {})
(19129, 19130, {})
(19130, 19131, {})
(19131, 19132, {})
(19132, 19133, {})
(19133, 19134, {})
(19134, 19135, {})
(19135, 19136, {})
(19136, 19137, {})
(19137, 19138, {})
(19138, 19139, {})
(19139, 19140, {})
(19140, 19141, {})
(19141, 19142, {})
(19142, 19143, {})
(19143, 19144, {})
(19144, 19145, {})
(19145, 19146, {})
(19146, 19147, {})
(19147, 19148, {})
(19148, 19149, {})
(19149, 19150, {})
(19150, 19151, {})
(19151, 19152, {})
(19152, 19153, {})
(19153, 19154, {})
(19154, 19155, {})
(19155, 19156, {})
(19156, 19157, {})
(19157, 19158, {})
(19158, 19159, {})
(19159, 19160, {})
(19160, 19161, {})
(19161, 19162, {})
(19162, 19163, {})
(19163, 19164, {})
(19164, 19165, {})
(19165, 19166, {})
(19166, 19167, {})
(19167, 19168, {})
(19168, 19169, {})
(19169, 19170, {})
(19170, 19171, {})
(19171, 19172, {})
(19172, 19173, {})
(19173, 19174, {})
(19174, 19175, {})
(19175, 19176, {})
(19176, 19177, {})
(19177, 19178, {})
(19178, 19179, {})
(19179, 19180, {})
(19180, 19181, {})
(19181, 19182, {})
(19182, 19183, {})
(19183, 19184, {})
(19184, 19185, {})
(19185, 19186, {})
(19186, 19187, {})
(19187, 19188, {})
(19188, 19189, {})
(19189, 19190, {})
(19190, 19191, {})
(19191, 19192, {})
(19192, 19193, {})
(19193, 19194, {})
(19194, 19195, {})
(19195, 19196, {})
(19196, 19197, {})
(19197, 19198, {})
(19198, 19199, {})
(19199, 19200, {})
(19200, 19201, {})
(19201, 19202, {})
(19202, 19203, {})
(19203, 19204, {})
(19204, 19205, {})
(19205, 19206, {})
(19206, 19207, {})
(19207, 19208, {})
(19208, 19209, {})
(19209, 19210, {})
(19210, 19211, {})
(19211, 19212, {})
(19212, 19213, {})
(19213, 19214, {})
(19214, 19215, {})
(19215, 19216, {})
(19216, 19217, {})
(19217, 19218, {})
(19218, 19219, {})
(19219, 19220, {})
(19220, 19221, {})
(19221, 19222, {})
(19222, 19223, {})
(19223, 19224, {})
(19224, 19225, {})
(19225, 19226, {})
(19226, 19227, {})
(19227, 19228, {})
(19228, 19229, {})
(19229, 19230, {})
(19230, 19231, {})
(19231, 19232, {})
(19232, 19233, {})
(19233, 19234, {})
(19234, 19235, {})
(19235, 19236, {})
(19236, 19237, {})
(19237, 19238, {})
(19238, 19239, {})
(19239, 19240, {})
(19240, 19241, {})
(19241, 19242, {})
(19242, 19243, {})
(19243, 19244, {})
(19244, 19245, {})
(19245, 19246, {})
(19246, 19247, {})
(19247, 19248, {})
(19248, 19249, {})
(19249, 19250, {})
(19250, 19251, {})
(19251, 19252, {})
(19252, 19253, {})
(19253, 19254, {})
(19254, 19255, {})
(19255, 19256, {})
(19256, 19257, {})
(19257, 19258, {})
(19258, 19259, {})
(19259, 19260, {})
(19260, 19261, {})
(19261, 19262, {})
(19262, 19263, {})
(19263, 19264, {})
(19264, 19265, {})
(19265, 19266, {})
(19266, 19267, {})
(19267, 19268, {})
(19268, 19269, {})
(19269, 19270, {})
(19270, 19271, {})
(19271, 19272, {})
(19272, 19273, {})
(19273, 19274, {})
(19274, 19275, {})
(19275, 19276, {})
(19276, 19277, {})
(19277, 19278, {})
(19278, 19279, {})
(19279, 19280, {})
(19280, 19281, {})
(19281, 19282, {})
(19282, 19283, {})
(19283, 19284, {})
(19284, 19285, {})
(19285, 19286, {})
(19286, 19287, {})
(19287, 19288, {})
(19288, 19289, {})
(19289, 19290, {})
(19290, 19291, {})
(19291, 19292, {})
(19292, 19293, {})
(19293, 19294, {})
(19294, 19295, {})
(19295, 19296, {})
(19296, 19297, {})
(19297, 19298, {})
(19298, 19299, {})
(19299, 19300, {})
(19300, 19301, {})
(19301, 19302, {})
(19302, 19303, {})
(19303, 19304, {})
(19304, 19305, {})
(19305, 19306, {})
(19306, 19307, {})
(19307, 19308, {})
(19308, 19309, {})
(19309, 19310, {})
(19310, 19311, {})
(19311, 19312, {})
(19312, 19313, {})
(19313, 19314, {})
(19314, 19315, {})
(19315, 19316, {})
(19316, 19317, {})
(19317, 19318, {})
(19318, 19319, {})
(19319, 19320, {})
(19320, 19321, {})
(19321, 19322, {})
(19322, 19323, {})
(19323, 19324, {})
(19324, 19325, {})
(19325, 19326, {})
(19326, 19327, {})
(19327, 19328, {})
(19328, 19329, {})
(19329, 19330, {})
(19330, 19331, {})
(19331, 19332, {})
(19332, 19333, {})
(19333, 19334, {})
(19334, 19335, {})
(19335, 19336, {})
(19336, 19337, {})
(19337, 19338, {})
(19338, 19339, {})
(19339, 19340, {})
(19340, 19341, {})
(19341, 19342, {})
(19342, 19343, {})
(19343, 19344, {})
(19344, 19345, {})
(19345, 19346, {})
(19346, 19347, {})
(19347, 19348, {})
(19348, 19349, {})
(19349, 19350, {})
(19350, 19351, {})
(19351, 19352, {})
(19352, 19353, {})
(19353, 19354, {})
(19354, 19355, {})
(19355, 19356, {})
(19356, 19357, {})
(19357, 19358, {})
(19358, 19359, {})
(19359, 19360, {})
(19360, 19361, {})
(19361, 19362, {})
(19362, 19363, {})
(19363, 19364, {})
(19364, 19365, {})
(19365, 19366, {})
(19366, 19367, {})
(19367, 19368, {})
(19368, 19369, {})
(19369, 19370, {})
(19370, 19371, {})
(19371, 19372, {})
(19372, 19373, {})
(19373, 19374, {})
(19374, 19375, {})
(19375, 19376, {})
(19376, 19377, {})
(19377, 19378, {})
(19378, 19379, {})
(19379, 19380, {})
(19380, 19381, {})
(19381, 19382, {})
(19382, 19383, {})
(19383, 19384, {})
(19384, 19385, {})
(19385, 19386, {})
(19386, 19387, {})
(19387, 19388, {})
(19388, 19389, {})
(19389, 19390, {})
(19390, 19391, {})
(19391, 19392, {})
(19392, 19393, {})
(19393, 19394, {})
(19394, 19395, {})
(19395, 19396, {})
(19396, 19397, {})
(19397, 19398, {})
(19398, 19399, {})
(19399, 19400, {})
(19400, 19401, {})
(19401, 19402, {})
(19402, 19403, {})
(19403, 19404, {})
(19404, 19405, {})
(19405, 19406, {})
(19406, 19407, {})
(19407, 19408, {})
(19408, 19409, {})
(19409, 19410, {})
(19410, 19411, {})
(19411, 19412, {})
(19412, 19413, {})
(19413, 19414, {})
(19414, 19415, {})
(19415, 19416, {})
(19416, 19417, {})
(19417, 19418, {})
(19418, 19419, {})
(19419, 19420, {})
(19420, 19421, {})
(19421, 19422, {})
(19422, 19423, {})
(19423, 19424, {})
(19424, 19425, {})
(19425, 19426, {})
(19426, 19427, {})
(19427, 19428, {})
(19428, 19429, {})
(19429, 19430, {})
(19430, 19431, {})
(19431, 19432, {})
(19432, 19433, {})
(19433, 19434, {})
(19434, 19435, {})
(19435, 19436, {})
(19436, 19437, {})
(19437, 19438, {})
(19438, 19439, {})
(19439, 19440, {})
(19440, 19441, {})
(19441, 19442, {})
(19442, 19443, {})
(19443, 19444, {})
(19444, 19445, {})
(19445, 19446, {})
(19446, 19447, {})
(19447, 19448, {})
(19448, 19449, {})
(19449, 19450, {})
(19450, 19451, {})
(19451, 19452, {})
(19452, 19453, {})
(19453, 19454, {})
(19454, 19455, {})
(19455, 19456, {})
(19456, 19457, {})
(19457, 19458, {})
(19458, 19459, {})
(19459, 19460, {})
(19460, 19461, {})
(19461, 19462, {})
(19462, 19463, {})
(19463, 19464, {})
(19464, 19465, {})
(19465, 19466, {})
(19466, 19467, {})
(19467, 19468, {})
(19468, 19469, {})
(19469, 19470, {})
(19470, 19471, {})
(19471, 19472, {})
(19472, 19473, {})
(19473, 19474, {})
(19474, 19475, {})
(19475, 19476, {})
(19476, 19477, {})
(19477, 19478, {})
(19478, 19479, {})
(19479, 19480, {})
(19480, 19481, {})
(19481, 19482, {})
(19482, 19483, {})
(19483, 19484, {})
(19484, 19485, {})
(19485, 19486, {})
(19486, 19487, {})
(19487, 19488, {})
(19488, 19489, {})
(19489, 19490, {})
(19490, 19491, {})
(19491, 19492, {})
(19492, 19493, {})
(19493, 19494, {})
(19494, 19495, {})
(19495, 19496, {})
(19496, 19497, {})
(19497, 19498, {})
(19498, 19499, {})
(19499, 19500, {})
(19500, 19501, {})
(19501, 19502, {})
(19502, 19503, {})
(19503, 19504, {})
(19504, 19505, {})
(19505, 19506, {})
(19506, 19507, {})
(19507, 19508, {})
(19508, 19509, {})
(19509, 19510, {})
(19510, 19511, {})
(19511, 19512, {})
(19512, 19513, {})
(19513, 19514, {})
(19514, 19515, {})
(19515, 19516, {})
(19516, 19517, {})
(19517, 19518, {})
(19518, 19519, {})
(19519, 19520, {})
(19520, 19521, {})
(19521, 19522, {})
(19522, 19523, {})
(19523, 19524, {})
(19524, 19525, {})
(19525, 19526, {})
(19526, 19527, {})
(19527, 19528, {})
(19528, 19529, {})
(19529, 19530, {})
(19530, 19531, {})
(19531, 19532, {})
(19532, 19533, {})
(19533, 19534, {})
(19534, 19535, {})
(19535, 19536, {})
(19536, 19537, {})
(19537, 19538, {})
(19538, 19539, {})
(19539, 19540, {})
(19540, 19541, {})
(19541, 19542, {})
(19542, 19543, {})
(19543, 19544, {})
(19544, 19545, {})
(19545, 19546, {})
(19546, 19547, {})
(19547, 19548, {})
(19548, 19549, {})
(19549, 19550, {})
(19550, 19551, {})
(19551, 19552, {})
(19552, 19553, {})
(19553, 19554, {})
(19554, 19555, {})
(19555, 19556, {})
(19556, 19557, {})
(19557, 19558, {})
(19558, 19559, {})
(19559, 19560, {})
(19560, 19561, {})
(19561, 19562, {})
(19562, 19563, {})
(19563, 19564, {})
(19564, 19565, {})
(19565, 19566, {})
(19566, 19567, {})
(19567, 19568, {})
(19568, 19569, {})
(19569, 19570, {})
(19570, 19571, {})
(19571, 19572, {})
(19572, 19573, {})
(19573, 19574, {})
(19574, 19575, {})
(19575, 19576, {})
(19576, 19577, {})
(19577, 19578, {})
(19578, 19579, {})
(19579, 19580, {})
(19580, 19581, {})
(19581, 19582, {})
(19582, 19583, {})
(19583, 19584, {})
(19584, 19585, {})
(19585, 19586, {})
(19586, 19587, {})
(19587, 19588, {})
(19588, 19589, {})
(19589, 19590, {})
(19590, 19591, {})
(19591, 19592, {})
(19592, 19593, {})
(19593, 19594, {})
(19594, 19595, {})
(19595, 19596, {})
(19596, 19597, {})
(19597, 19598, {})
(19598, 19599, {})
(19599, 19600, {})
(19600, 19601, {})
(19601, 19602, {})
(19602, 19603, {})
(19603, 19604, {})
(19604, 19605, {})
(19605, 19606, {})
(19606, 19607, {})
(19607, 19608, {})
(19608, 19609, {})
(19609, 19610, {})
(19610, 19611, {})
(19611, 19612, {})
(19612, 19613, {})
(19613, 19614, {})
(19614, 19615, {})
(19615, 19616, {})
(19616, 19617, {})
(19617, 19618, {})
(19618, 19619, {})
(19619, 19620, {})
(19620, 19621, {})
(19621, 19622, {})
(19622, 19623, {})
(19623, 19624, {})
(19624, 19625, {})
(19625, 19626, {})
(19626, 19627, {})
(19627, 19628, {})
(19628, 19629, {})
(19629, 19630, {})
(19630, 19631, {})
(19631, 19632, {})
(19632, 19633, {})
(19633, 19634, {})
(19634, 19635, {})
(19635, 19636, {})
(19636, 19637, {})
(19637, 19638, {})
(19638, 19639, {})
(19639, 19640, {})
(19640, 19641, {})
(19641, 19642, {})
(19642, 19643, {})
(19643, 19644, {})
(19644, 19645, {})
(19645, 19646, {})
(19646, 19647, {})
(19647, 19648, {})
(19648, 19649, {})
(19649, 19650, {})
(19650, 19651, {})
(19651, 19652, {})
(19652, 19653, {})
(19653, 19654, {})
(19654, 19655, {})
(19655, 19656, {})
(19656, 19657, {})
(19657, 19658, {})
(19658, 19659, {})
(19659, 19660, {})
(19660, 19661, {})
(19661, 19662, {})
(19662, 19663, {})
(19663, 19664, {})
(19664, 19665, {})
(19665, 19666, {})
(19666, 19667, {})
(19667, 19668, {})
(19668, 19669, {})
(19669, 19670, {})
(19670, 19671, {})
(19671, 19672, {})
(19672, 19673, {})
(19673, 19674, {})
(19674, 19675, {})
(19675, 19676, {})
(19676, 19677, {})
(19677, 19678, {})
(19678, 19679, {})
(19679, 19680, {})
(19680, 19681, {})
(19681, 19682, {})
(19682, 19683, {})
(19683, 19684, {})
(19684, 19685, {})
(19685, 19686, {})
(19686, 19687, {})
(19687, 19688, {})
(19688, 19689, {})
(19689, 19690, {})
(19690, 19691, {})
(19691, 19692, {})
(19692, 19693, {})
(19693, 19694, {})
(19694, 19695, {})
(19695, 19696, {})
(19696, 19697, {})
(19697, 19698, {})
(19698, 19699, {})
(19699, 19700, {})
(19700, 19701, {})
(19701, 19702, {})
(19702, 19703, {})
(19703, 19704, {})
(19704, 19705, {})
(19705, 19706, {})
(19706, 19707, {})
(19707, 19708, {})
(19708, 19709, {})
(19709, 19710, {})
(19710, 19711, {})
(19711, 19712, {})
(19712, 19713, {})
(19713, 19714, {})
(19714, 19715, {})
(19715, 19716, {})
(19716, 19717, {})
(19717, 19718, {})
(19718, 19719, {})
(19719, 19720, {})
(19720, 19721, {})
(19721, 19722, {})
(19722, 19723, {})
(19723, 19724, {})
(19724, 19725, {})
(19725, 19726, {})
(19726, 19727, {})
(19727, 19728, {})
(19728, 19729, {})
(19729, 19730, {})
(19730, 19731, {})
(19731, 19732, {})
(19732, 19733, {})
(19733, 19734, {})
(19734, 19735, {})
(19735, 19736, {})
(19736, 19737, {})
(19737, 19738, {})
(19738, 19739, {})
(19739, 19740, {})
(19740, 19741, {})
(19741, 19742, {})
(19742, 19743, {})
(19743, 19744, {})
(19744, 19745, {})
(19745, 19746, {})
(19746, 19747, {})
(19747, 19748, {})
(19748, 19749, {})
(19749, 19750, {})
(19750, 19751, {})
(19751, 19752, {})
(19752, 19753, {})
(19753, 19754, {})
(19754, 19755, {})
(19755, 19756, {})
(19756, 19757, {})
(19757, 19758, {})
(19758, 19759, {})
(19759, 19760, {})
(19760, 19761, {})
(19761, 19762, {})
(19762, 19763, {})
(19763, 19764, {})
(19764, 19765, {})
(19765, 19766, {})
(19766, 19767, {})
(19767, 19768, {})
(19768, 19769, {})
(19769, 19770, {})
(19770, 19771, {})
(19771, 19772, {})
(19772, 19773, {})
(19773, 19774, {})
(19774, 19775, {})
(19775, 19776, {})
(19776, 19777, {})
(19777, 19778, {})
(19778, 19779, {})
(19779, 19780, {})
(19780, 19781, {})
(19781, 19782, {})
(19782, 19783, {})
(19783, 19784, {})
(19784, 19785, {})
(19785, 19786, {})
(19786, 19787, {})
(19787, 19788, {})
(19788, 19789, {})
(19789, 19790, {})
(19790, 19791, {})
(19791, 19792, {})
(19792, 19793, {})
(19793, 19794, {})
(19794, 19795, {})
(19795, 19796, {})
(19796, 19797, {})
(19797, 19798, {})
(19798, 19799, {})
(19799, 19800, {})
(19800, 19801, {})
(19801, 19802, {})
(19802, 19803, {})
(19803, 19804, {})
(19804, 19805, {})
(19805, 19806, {})
(19806, 19807, {})
(19807, 19808, {})
(19808, 19809, {})
(19809, 19810, {})
(19810, 19811, {})
(19811, 19812, {})
(19812, 19813, {})
(19813, 19814, {})
(19814, 19815, {})
(19815, 19816, {})
(19816, 19817, {})
(19817, 19818, {})
(19818, 19819, {})
(19819, 19820, {})
(19820, 19821, {})
(19821, 19822, {})
(19822, 19823, {})
(19823, 19824, {})
(19824, 19825, {})
(19825, 19826, {})
(19826, 19827, {})
(19827, 19828, {})
(19828, 19829, {})
(19829, 19830, {})
(19830, 19831, {})
(19831, 19832, {})
(19832, 19833, {})
(19833, 19834, {})
(19834, 19835, {})
(19835, 19836, {})
(19836, 19837, {})
(19837, 19838, {})
(19838, 19839, {})
(19839, 19840, {})
(19840, 19841, {})
(19841, 19842, {})
(19842, 19843, {})
(19843, 19844, {})
(19844, 19845, {})
(19845, 19846, {})
(19846, 19847, {})
(19847, 19848, {})
(19848, 19849, {})
(19849, 19850, {})
(19850, 19851, {})
(19851, 19852, {})
(19852, 19853, {})
(19853, 19854, {})
(19854, 19855, {})
(19855, 19856, {})
(19856, 19857, {})
(19857, 19858, {})
(19858, 19859, {})
(19859, 19860, {})
(19860, 19861, {})
(19861, 19862, {})
(19862, 19863, {})
(19863, 19864, {})
(19864, 19865, {})
(19865, 19866, {})
(19866, 19867, {})
(19867, 19868, {})
(19868, 19869, {})
(19869, 19870, {})
(19870, 19871, {})
(19871, 19872, {})
(19872, 19873, {})
(19873, 19874, {})
(19874, 19875, {})
(19875, 19876, {})
(19876, 19877, {})
(19877, 19878, {})
(19878, 19879, {})
(19879, 19880, {})
(19880, 19881, {})
(19881, 19882, {})
(19882, 19883, {})
(19883, 19884, {})
(19884, 19885, {})
(19885, 19886, {})
(19886, 19887, {})
(19887, 19888, {})
(19888, 19889, {})
(19889, 19890, {})
(19890, 19891, {})
(19891, 19892, {})
(19892, 19893, {})
(19893, 19894, {})
(19894, 19895, {})
(19895, 19896, {})
(19896, 19897, {})
(19897, 19898, {})
(19898, 19899, {})
(19899, 19900, {})
(19900, 19901, {})
(19901, 19902, {})
(19902, 19903, {})
(19903, 19904, {})
(19904, 19905, {})
(19905, 19906, {})
(19906, 19907, {})
(19907, 19908, {})
(19908, 19909, {})
(19909, 19910, {})
(19910, 19911, {})
(19911, 19912, {})
(19912, 19913, {})
(19913, 19914, {})
(19914, 19915, {})
(19915, 19916, {})
(19916, 19917, {})
(19917, 19918, {})
(19918, 19919, {})
(19919, 19920, {})
(19920, 19921, {})
(19921, 19922, {})
(19922, 19923, {})
(19923, 19924, {})
(19924, 19925, {})
(19925, 19926, {})
(19926, 19927, {})
(19927, 19928, {})
(19928, 19929, {})
(19929, 19930, {})
(19930, 19931, {})
(19931, 19932, {})
(19932, 19933, {})
(19933, 19934, {})
(19934, 19935, {})
(19935, 19936, {})
(19936, 19937, {})
(19937, 19938, {})
(19938, 19939, {})
(19939, 19940, {})
(19940, 19941, {})
(19941, 19942, {})
(19942, 19943, {})
(19943, 19944, {})
(19944, 19945, {})
(19945, 19946, {})
(19946, 19947, {})
(19947, 19948, {})
(19948, 19949, {})
(19949, 19950, {})
(19950, 19951, {})
(19951, 19952, {})
(19952, 19953, {})
(19953, 19954, {})
(19954, 19955, {})
(19955, 19956, {})
(19956, 19957, {})
(19957, 19958, {})
(19958, 19959, {})
(19959, 19960, {})
(19960, 19961, {})
(19961, 19962, {})
(19962, 19963, {})
(19963, 19964, {})
(19964, 19965, {})
(19965, 19966, {})
(19966, 19967, {})
(19967, 19968, {})
(19968, 19969, {})
(19969, 19970, {})
(19970, 19971, {})
(19971, 19972, {})
(19972, 19973, {})
(19973, 19974, {})
(19974, 19975, {})
(19975, 19976, {})
(19976, 19977, {})
(19977, 19978, {})
(19978, 19979, {})
(19979, 19980, {})
(19980, 19981, {})
(19981, 19982, {})
(19982, 19983, {})
(19983, 19984, {})
(19984, 19985, {})
(19985, 19986, {})
(19986, 19987, {})
(19987, 19988, {})
(19988, 19989, {})
(19989, 19990, {})
(19990, 19991, {})
(19991, 19992, {})
(19992, 19993, {})
(19993, 19994, {})
(19994, 19995, {})
(19995, 19996, {})
(19996, 19997, {})
(19997, 19998, {})
(19998, 19999, {})
(19999, 20000, {})
(20000, 20001, {})
(20001, 20002, {})
(20002, 20003, {})
(20003, 20004, {})
(20004, 20005, {})
(20005, 20006, {})
(20006, 20007, {})
(20007, 20008, {})
(20008, 20009, {})
(20009, 20010, {})
(20010, 20011, {})
(20011, 20012, {})
(20012, 20013, {})
(20013, 20014, {})
(20014, 20015, {})
(20015, 20016, {})
(20016, 20017, {})
(20017, 20018, {})
(20018, 20019, {})
(20019, 20020, {})
(20020, 20021, {})
(20021, 20022, {})
(20022, 20023, {})
(20023, 20024, {})
(20024, 20025, {})
(20025, 20026, {})
(20026, 20027, {})
(20027, 20028, {})
(20028, 20029, {})
(20029, 20030, {})
(20030, 20031, {})
(20031, 20032, {})
(20032, 20033, {})
(20033, 20034, {})
(20034, 20035, {})
(20035, 20036, {})
(20036, 20037, {})
(20037, 20038, {})
(20038, 20039, {})
(20039, 20040, {})
(20040, 20041, {})
(20041, 20042, {})
(20042, 20043, {})
(20043, 20044, {})
(20044, 20045, {})
(20045, 20046, {})
(20046, 20047, {})
(20047, 20048, {})
(20048, 20049, {})
(20049, 20050, {})
(20050, 20051, {})
(20051, 20052, {})
(20052, 20053, {})
(20053, 20054, {})
(20054, 20055, {})
(20055, 20056, {})
(20056, 20057, {})
(20057, 20058, {})
(20058, 20059, {})
(20059, 20060, {})
(20060, 20061, {})
(20061, 20062, {})
(20062, 20063, {})
(20063, 20064, {})
(20064, 20065, {})
(20065, 20066, {})
(20066, 20067, {})
(20067, 20068, {})
(20068, 20069, {})
(20069, 20070, {})
(20070, 20071, {})
(20071, 20072, {})
(20072, 20073, {})
(20073, 20074, {})
(20074, 20075, {})
(20075, 20076, {})
(20076, 20077, {})
(20077, 20078, {})
(20078, 20079, {})
(20079, 20080, {})
(20080, 20081, {})
(20081, 20082, {})
(20082, 20083, {})
(20083, 20084, {})
(20084, 20085, {})
(20085, 20086, {})
(20086, 20087, {})
(20087, 20088, {})
(20088, 20089, {})
(20089, 20090, {})
(20090, 20091, {})
(20091, 20092, {})
(20092, 20093, {})
(20093, 20094, {})
(20094, 20095, {})
(20095, 20096, {})
(20096, 20097, {})
(20097, 20098, {})
(20098, 20099, {})
(20099, 20100, {})
(20100, 20101, {})
(20101, 20102, {})
(20102, 20103, {})
(20103, 20104, {})
(20104, 20105, {})
(20105, 20106, {})
(20106, 20107, {})
(20107, 20108, {})
(20108, 20109, {})
(20109, 20110, {})
(20110, 20111, {})
(20111, 20112, {})
(20112, 20113, {})
(20113, 20114, {})
(20114, 20115, {})
(20115, 20116, {})
(20116, 20117, {})
(20117, 20118, {})
(20118, 20119, {})
(20119, 20120, {})
(20120, 20121, {})
(20121, 20122, {})
(20122, 20123, {})
(20123, 20124, {})
(20124, 20125, {})
(20125, 20126, {})
(20126, 20127, {})
(20127, 20128, {})
(20128, 20129, {})
(20129, 20130, {})
(20130, 20131, {})
(20131, 20132, {})
(20132, 20133, {})
(20133, 20134, {})
(20134, 20135, {})
(20135, 20136, {})
(20136, 20137, {})
(20137, 20138, {})
(20138, 20139, {})
(20139, 20140, {})
(20140, 20141, {})
(20141, 20142, {})
(20142, 20143, {})
(20143, 20144, {})
(20144, 20145, {})
(20145, 20146, {})
(20146, 20147, {})
(20147, 20148, {})
(20148, 20149, {})
(20149, 20150, {})
(20150, 20151, {})
(20151, 20152, {})
(20152, 20153, {})
(20153, 20154, {})
(20154, 20155, {})
(20155, 20156, {})
(20156, 20157, {})
(20157, 20158, {})
(20158, 20159, {})
(20159, 20160, {})
(20160, 20161, {})
(20161, 20162, {})
(20162, 20163, {})
(20163, 20164, {})
(20164, 20165, {})
(20165, 20166, {})
(20166, 20167, {})
(20167, 20168, {})
(20168, 20169, {})
(20169, 20170, {})
(20170, 20171, {})
(20171, 20172, {})
(20172, 20173, {})
(20173, 20174, {})
(20174, 20175, {})
(20175, 20176, {})
(20176, 20177, {})
(20177, 20178, {})
(20178, 20179, {})
(20179, 20180, {})
(20180, 20181, {})
(20181, 20182, {})
(20182, 20183, {})
(20183, 20184, {})
(20184, 20185, {})
(20185, 20186, {})
(20186, 20187, {})
(20187, 20188, {})
(20188, 20189, {})
(20189, 20190, {})
(20190, 20191, {})
(20191, 20192, {})
(20192, 20193, {})
(20193, 20194, {})
(20194, 20195, {})
(20195, 20196, {})
(20196, 20197, {})
(20197, 20198, {})
(20198, 20199, {})
(20199, 20200, {})
(20200, 20201, {})
(20201, 20202, {})
(20202, 20203, {})
(20203, 20204, {})
(20204, 20205, {})
(20205, 20206, {})
(20206, 20207, {})
(20207, 20208, {})
(20208, 20209, {})
(20209, 20210, {})
(20210, 20211, {})
(20211, 20212, {})
(20212, 20213, {})
(20213, 20214, {})
(20214, 20215, {})
(20215, 20216, {})
(20216, 20217, {})
(20217, 20218, {})
(20218, 20219, {})
(20219, 20220, {})
(20220, 20221, {})
(20221, 20222, {})
(20222, 20223, {})
(20223, 20224, {})
(20224, 20225, {})
(20225, 20226, {})
(20226, 20227, {})
(20227, 20228, {})
(20228, 20229, {})
(20229, 20230, {})
(20230, 20231, {})
(20231, 20232, {})
(20232, 20233, {})
(20233, 20234, {})
(20234, 20235, {})
(20235, 20236, {})
(20236, 20237, {})
(20237, 20238, {})
(20238, 20239, {})
(20239, 20240, {})
(20240, 20241, {})
(20241, 20242, {})
(20242, 20243, {})
(20243, 20244, {})
(20244, 20245, {})
(20245, 20246, {})
(20246, 20247, {})
(20247, 20248, {})
(20248, 20249, {})
(20249, 20250, {})
(20250, 20251, {})
(20251, 20252, {})
(20252, 20253, {})
(20253, 20254, {})
(20254, 20255, {})
(20255, 20256, {})
(20256, 20257, {})
(20257, 20258, {})
(20258, 20259, {})
(20259, 20260, {})
(20260, 20261, {})
(20261, 20262, {})
(20262, 20263, {})
(20263, 20264, {})
(20264, 20265, {})
(20265, 20266, {})
(20266, 20267, {})
(20267, 20268, {})
(20268, 20269, {})
(20269, 20270, {})
(20270, 20271, {})
(20271, 20272, {})
(20272, 20273, {})
(20273, 20274, {})
(20274, 20275, {})
(20275, 20276, {})
(20276, 20277, {})
(20277, 20278, {})
(20278, 20279, {})
(20279, 20280, {})
(20280, 20281, {})
(20281, 20282, {})
(20282, 20283, {})
(20283, 20284, {})
(20284, 20285, {})
(20285, 20286, {})
(20286, 20287, {})
(20287, 20288, {})
(20288, 20289, {})
(20289, 20290, {})
(20290, 20291, {})
(20291, 20292, {})
(20292, 20293, {})
(20293, 20294, {})
(20294, 20295, {})
(20295, 20296, {})
(20296, 20297, {})
(20297, 20298, {})
(20298, 20299, {})
(20299, 20300, {})
(20300, 20301, {})
(20301, 20302, {})
(20302, 20303, {})
(20303, 20304, {})
(20304, 20305, {})
(20305, 20306, {})
(20306, 20307, {})
(20307, 20308, {})
(20308, 20309, {})
(20309, 20310, {})
(20310, 20311, {})
(20311, 20312, {})
(20312, 20313, {})
(20313, 20314, {})
(20314, 20315, {})
(20315, 20316, {})
(20316, 20317, {})
(20317, 20318, {})
(20318, 20319, {})
(20319, 20320, {})
(20320, 20321, {})
(20321, 20322, {})
(20322, 20323, {})
(20323, 20324, {})
(20324, 20325, {})
(20325, 20326, {})
(20326, 20327, {})
(20327, 20328, {})
(20328, 20329, {})
(20329, 20330, {})
(20330, 20331, {})
(20331, 20332, {})
(20332, 20333, {})
(20333, 20334, {})
(20334, 20335, {})
(20335, 20336, {})
(20336, 20337, {})
(20337, 20338, {})
(20338, 20339, {})
(20339, 20340, {})
(20340, 20341, {})
(20341, 20342, {})
(20342, 20343, {})
(20343, 20344, {})
(20344, 20345, {})
(20345, 20346, {})
(20346, 20347, {})
(20347, 20348, {})
(20348, 20349, {})
(20349, 20350, {})
(20350, 20351, {})
(20351, 20352, {})
(20352, 20353, {})
(20353, 20354, {})
(20354, 20355, {})
(20355, 20356, {})
(20356, 20357, {})
(20357, 20358, {})
(20358, 20359, {})
(20359, 20360, {})
(20360, 20361, {})
(20361, 20362, {})
(20362, 20363, {})
(20363, 20364, {})
(20364, 20365, {})
(20365, 20366, {})
(20366, 20367, {})
(20367, 20368, {})
(20368, 20369, {})
(20369, 20370, {})
(20370, 20371, {})
(20371, 20372, {})
(20372, 20373, {})
(20373, 20374, {})
(20374, 20375, {})
(20375, 20376, {})
(20376, 20377, {})
(20377, 20378, {})
(20378, 20379, {})
(20379, 20380, {})
(20380, 20381, {})
(20381, 20382, {})
(20382, 20383, {})
(20383, 20384, {})
(20384, 20385, {})
(20385, 20386, {})
(20386, 20387, {})
(20387, 20388, {})
(20388, 20389, {})
(20389, 20390, {})
(20390, 20391, {})
(20391, 20392, {})
(20392, 20393, {})
(20393, 20394, {})
(20394, 20395, {})
(20395, 20396, {})
(20396, 20397, {})
(20397, 20398, {})
(20398, 20399, {})
(20399, 20400, {})
(20400, 20401, {})
(20401, 20402, {})
(20402, 20403, {})
(20403, 20404, {})
(20404, 20405, {})
(20405, 20406, {})
(20406, 20407, {})
(20407, 20408, {})
(20408, 20409, {})
(20409, 20410, {})
(20410, 20411, {})
(20411, 20412, {})
(20412, 20413, {})
(20413, 20414, {})
(20414, 20415, {})
(20415, 20416, {})
(20416, 20417, {})
(20417, 20418, {})
(20418, 20419, {})
(20419, 20420, {})
(20420, 20421, {})
(20421, 20422, {})
(20422, 20423, {})
(20423, 20424, {})
(20424, 20425, {})
(20425, 20426, {})
(20426, 20427, {})
(20427, 20428, {})
(20428, 20429, {})
(20429, 20430, {})
(20430, 20431, {})
(20431, 20432, {})
(20432, 20433, {})
(20433, 20434, {})
(20434, 20435, {})
(20435, 20436, {})
(20436, 20437, {})
(20437, 20438, {})
(20438, 20439, {})
(20439, 20440, {})
(20440, 20441, {})
(20441, 20442, {})
(20442, 20443, {})
(20443, 20444, {})
(20444, 20445, {})
(20445, 20446, {})
(20446, 20447, {})
(20447, 20448, {})
(20448, 20449, {})
(20449, 20450, {})
(20450, 20451, {})
(20451, 20452, {})
(20452, 20453, {})
(20453, 20454, {})
(20454, 20455, {})
(20455, 20456, {})
(20456, 20457, {})
(20457, 20458, {})
(20458, 20459, {})
(20459, 20460, {})
(20460, 20461, {})
(20461, 20462, {})
(20462, 20463, {})
(20463, 20464, {})
(20464, 20465, {})
(20465, 20466, {})
(20466, 20467, {})
(20467, 20468, {})
(20468, 20469, {})
(20469, 20470, {})
(20470, 20471, {})
(20471, 20472, {})
(20472, 20473, {})
(20473, 20474, {})
(20474, 20475, {})
(20475, 20476, {})
(20476, 20477, {})
(20477, 20478, {})
(20478, 20479, {})
(20479, 20480, {})
(20480, 20481, {})
(20481, 20482, {})
(20482, 20483, {})
(20483, 20484, {})
(20484, 20485, {})
(20485, 20486, {})
(20486, 20487, {})
(20487, 20488, {})
(20488, 20489, {})
(20489, 20490, {})
(20490, 20491, {})
(20491, 20492, {})
(20492, 20493, {})
(20493, 20494, {})
(20494, 20495, {})
(20495, 20496, {})
(20496, 20497, {})
(20497, 20498, {})
(20498, 20499, {})
(20499, 20500, {})
(20500, 20501, {})
(20501, 20502, {})
(20502, 20503, {})
(20503, 20504, {})
(20504, 20505, {})
(20505, 20506, {})
(20506, 20507, {})
(20507, 20508, {})
(20508, 20509, {})
(20509, 20510, {})
(20510, 20511, {})
(20511, 20512, {})
(20512, 20513, {})
(20513, 20514, {})
(20514, 20515, {})
(20515, 20516, {})
(20516, 20517, {})
(20517, 20518, {})
(20518, 20519, {})
(20519, 20520, {})
(20520, 20521, {})
(20521, 20522, {})
(20522, 20523, {})
(20523, 20524, {})
(20524, 20525, {})
(20525, 20526, {})
(20526, 20527, {})
(20527, 20528, {})
(20528, 20529, {})
(20529, 20530, {})
(20530, 20531, {})
(20531, 20532, {})
(20532, 20533, {})
(20533, 20534, {})
(20534, 20535, {})
(20535, 20536, {})
(20536, 20537, {})
(20537, 20538, {})
(20538, 20539, {})
(20539, 20540, {})
(20540, 20541, {})
(20541, 20542, {})
(20542, 20543, {})
(20543, 20544, {})
(20544, 20545, {})
(20545, 20546, {})
(20546, 20547, {})
(20547, 20548, {})
(20548, 20549, {})
(20549, 20550, {})
(20550, 20551, {})
(20551, 20552, {})
(20552, 20553, {})
(20553, 20554, {})
(20554, 20555, {})
(20555, 20556, {})
(20556, 20557, {})
(20557, 20558, {})
(20558, 20559, {})
(20559, 20560, {})
(20560, 20561, {})
(20561, 20562, {})
(20562, 20563, {})
(20563, 20564, {})
(20564, 20565, {})
(20565, 20566, {})
(20566, 20567, {})
(20567, 20568, {})
(20568, 20569, {})
(20569, 20570, {})
(20570, 20571, {})
(20571, 20572, {})
(20572, 20573, {})
(20573, 20574, {})
(20574, 20575, {})
(20575, 20576, {})
(20576, 20577, {})
(20577, 20578, {})
(20578, 20579, {})
(20579, 20580, {})
(20580, 20581, {})
(20581, 20582, {})
(20582, 20583, {})
(20583, 20584, {})
(20584, 20585, {})
(20585, 20586, {})
(20586, 20587, {})
(20587, 20588, {})
(20588, 20589, {})
(20589, 20590, {})
(20590, 20591, {})
(20591, 20592, {})
(20592, 20593, {})
(20593, 20594, {})
(20594, 20595, {})
(20595, 20596, {})
(20596, 20597, {})
(20597, 20598, {})
(20598, 20599, {})
(20599, 20600, {})
(20600, 20601, {})
(20601, 20602, {})
(20602, 20603, {})
(20603, 20604, {})
(20604, 20605, {})
(20605, 20606, {})
(20606, 20607, {})
(20607, 20608, {})
(20608, 20609, {})
(20609, 20610, {})
(20610, 20611, {})
(20611, 20612, {})
(20612, 20613, {})
(20613, 20614, {})
(20614, 20615, {})
(20615, 20616, {})
(20616, 20617, {})
(20617, 20618, {})
(20618, 20619, {})
(20619, 20620, {})
(20620, 20621, {})
(20621, 20622, {})
(20622, 20623, {})
(20623, 20624, {})
(20624, 20625, {})
(20625, 20626, {})
(20626, 20627, {})
(20627, 20628, {})
(20628, 20629, {})
(20629, 20630, {})
(20630, 20631, {})
(20631, 20632, {})
(20632, 20633, {})
(20633, 20634, {})
(20634, 20635, {})
(20635, 20636, {})
(20636, 20637, {})
(20637, 20638, {})
(20638, 20639, {})
(20639, 20640, {})
(20640, 20641, {})
(20641, 20642, {})
(20642, 20643, {})
(20643, 20644, {})
(20644, 20645, {})
(20645, 20646, {})
(20646, 20647, {})
(20647, 20648, {})
(20648, 20649, {})
(20649, 20650, {})
(20650, 20651, {})
(20651, 20652, {})
(20652, 20653, {})
(20653, 20654, {})
(20654, 20655, {})
(20655, 20656, {})
(20656, 20657, {})
(20657, 20658, {})
(20658, 20659, {})
(20659, 20660, {})
(20660, 20661, {})
(20661, 20662, {})
(20662, 20663, {})
(20663, 20664, {})
(20664, 20665, {})
(20665, 20666, {})
(20666, 20667, {})
(20667, 20668, {})
(20668, 20669, {})
(20669, 20670, {})
(20670, 20671, {})
(20671, 20672, {})
(20672, 20673, {})
(20673, 20674, {})
(20674, 20675, {})
(20675, 20676, {})
(20676, 20677, {})
(20677, 20678, {})
(20678, 20679, {})
(20679, 20680, {})
(20680, 20681, {})
(20681, 20682, {})
(20682, 20683, {})
(20683, 20684, {})
(20684, 20685, {})
(20685, 20686, {})
(20686, 20687, {})
(20687, 20688, {})
(20688, 20689, {})
(20689, 20690, {})
(20690, 20691, {})
(20691, 20692, {})
(20692, 20693, {})
(20693, 20694, {})
(20694, 20695, {})
(20695, 20696, {})
(20696, 20697, {})
(20697, 20698, {})
(20698, 20699, {})
(20699, 20700, {})
(20700, 20701, {})
(20701, 20702, {})
(20702, 20703, {})
(20703, 20704, {})
(20704, 20705, {})
(20705, 20706, {})
(20706, 20707, {})
(20707, 20708, {})
(20708, 20709, {})
(20709, 20710, {})
(20710, 20711, {})
(20711, 20712, {})
(20712, 20713, {})
(20713, 20714, {})
(20714, 20715, {})
(20715, 20716, {})
(20716, 20717, {})
(20717, 20718, {})
(20718, 20719, {})
(20719, 20720, {})
(20720, 20721, {})
(20721, 20722, {})
(20722, 20723, {})
(20723, 20724, {})
(20724, 20725, {})
(20725, 20726, {})
(20726, 20727, {})
(20727, 20728, {})
(20728, 20729, {})
(20729, 20730, {})
(20730, 20731, {})
(20731, 20732, {})
(20732, 20733, {})
(20733, 20734, {})
(20734, 20735, {})
(20735, 20736, {})
(20736, 20737, {})
(20737, 20738, {})
(20738, 20739, {})
(20739, 20740, {})
(20740, 20741, {})
(20741, 20742, {})
(20742, 20743, {})
(20743, 20744, {})
(20744, 20745, {})
(20745, 20746, {})
(20746, 20747, {})
(20747, 20748, {})
(20748, 20749, {})
(20749, 20750, {})
(20750, 20751, {})
(20751, 20752, {})
(20752, 20753, {})
(20753, 20754, {})
(20754, 20755, {})
(20755, 20756, {})
(20756, 20757, {})
(20757, 20758, {})
(20758, 20759, {})
(20759, 20760, {})
(20760, 20761, {})
(20761, 20762, {})
(20762, 20763, {})
(20763, 20764, {})
(20764, 20765, {})
(20765, 20766, {})
(20766, 20767, {})
(20767, 20768, {})
(20768, 20769, {})
(20769, 20770, {})
(20770, 20771, {})
(20771, 20772, {})
(20772, 20773, {})
(20773, 20774, {})
(20774, 20775, {})
(20775, 20776, {})
(20776, 20777, {})
(20777, 20778, {})
(20778, 20779, {})
(20779, 20780, {})
(20780, 20781, {})
(20781, 20782, {})
(20782, 20783, {})
(20783, 20784, {})
(20784, 20785, {})
(20785, 20786, {})
(20786, 20787, {})
(20787, 20788, {})
(20788, 20789, {})
(20789, 20790, {})
(20790, 20791, {})
(20791, 20792, {})
(20792, 20793, {})
(20793, 20794, {})
(20794, 20795, {})
(20795, 20796, {})
(20796, 20797, {})
(20797, 20798, {})
(20798, 20799, {})
(20799, 20800, {})
(20800, 20801, {})
(20801, 20802, {})
(20802, 20803, {})
(20803, 20804, {})
(20804, 20805, {})
(20805, 20806, {})
(20806, 20807, {})
(20807, 20808, {})
(20808, 20809, {})
(20809, 20810, {})
(20810, 20811, {})
(20811, 20812, {})
(20812, 20813, {})
(20813, 20814, {})
(20814, 20815, {})
(20815, 20816, {})
(20816, 20817, {})
(20817, 20818, {})
(20818, 20819, {})
(20819, 20820, {})
(20820, 20821, {})
(20821, 20822, {})
(20822, 20823, {})
(20823, 20824, {})
(20824, 20825, {})
(20825, 20826, {})
(20826, 20827, {})
(20827, 20828, {})
(20828, 20829, {})
(20829, 20830, {})
(20830, 20831, {})
(20831, 20832, {})
(20832, 20833, {})
(20833, 20834, {})
(20834, 20835, {})
(20835, 20836, {})
(20836, 20837, {})
(20837, 20838, {})
(20838, 20839, {})
(20839, 20840, {})
(20840, 20841, {})
(20841, 20842, {})
(20842, 20843, {})
(20843, 20844, {})
(20844, 20845, {})
(20845, 20846, {})
(20846, 20847, {})
(20847, 20848, {})
(20848, 20849, {})
(20849, 20850, {})
(20850, 20851, {})
(20851, 20852, {})
(20852, 20853, {})
(20853, 20854, {})
(20854, 20855, {})
(20855, 20856, {})
(20856, 20857, {})
(20857, 20858, {})
(20858, 20859, {})
(20859, 20860, {})
(20860, 20861, {})
(20861, 20862, {})
(20862, 20863, {})
(20863, 20864, {})
(20864, 20865, {})
(20865, 20866, {})
(20866, 20867, {})
(20867, 20868, {})
(20868, 20869, {})
(20869, 20870, {})
(20870, 20871, {})
(20871, 20872, {})
(20872, 20873, {})
(20873, 20874, {})
(20874, 20875, {})
(20875, 20876, {})
(20876, 20877, {})
(20877, 20878, {})
(20878, 20879, {})
(20879, 20880, {})
(20880, 20881, {})
(20881, 20882, {})
(20882, 20883, {})
(20883, 20884, {})
(20884, 20885, {})
(20885, 20886, {})
(20886, 20887, {})
(20887, 20888, {})
(20888, 20889, {})
(20889, 20890, {})
(20890, 20891, {})
(20891, 20892, {})
(20892, 20893, {})
(20893, 20894, {})
(20894, 20895, {})
(20895, 20896, {})
(20896, 20897, {})
(20897, 20898, {})
(20898, 20899, {})
(20899, 20900, {})
(20900, 20901, {})
(20901, 20902, {})
(20902, 20903, {})
(20903, 20904, {})
(20904, 20905, {})
(20905, 20906, {})
(20906, 20907, {})
(20907, 20908, {})
(20908, 20909, {})
(20909, 20910, {})
(20910, 20911, {})
(20911, 20912, {})
(20912, 20913, {})
(20913, 20914, {})
(20914, 20915, {})
(20915, 20916, {})
(20916, 20917, {})
(20917, 20918, {})
(20918, 20919, {})
(20919, 20920, {})
(20920, 20921, {})
(20921, 20922, {})
(20922, 20923, {})
(20923, 20924, {})
(20924, 20925, {})
(20925, 20926, {})
(20926, 20927, {})
(20927, 20928, {})
(20928, 20929, {})
(20929, 20930, {})
(20930, 20931, {})
(20931, 20932, {})
(20932, 20933, {})
(20933, 20934, {})
(20934, 20935, {})
(20935, 20936, {})
(20936, 20937, {})
(20937, 20938, {})
(20938, 20939, {})
(20939, 20940, {})
(20940, 20941, {})
(20941, 20942, {})
(20942, 20943, {})
(20943, 20944, {})
(20944, 20945, {})
(20945, 20946, {})
(20946, 20947, {})
(20947, 20948, {})
(20948, 20949, {})
(20949, 20950, {})
(20950, 20951, {})
(20951, 20952, {})
(20952, 20953, {})
(20953, 20954, {})
(20954, 20955, {})
(20955, 20956, {})
(20956, 20957, {})
(20957, 20958, {})
(20958, 20959, {})
(20959, 20960, {})
(20960, 20961, {})
(20961, 20962, {})
(20962, 20963, {})
(20963, 20964, {})
(20964, 20965, {})
(20965, 20966, {})
(20966, 20967, {})
(20967, 20968, {})
(20968, 20969, {})
(20969, 20970, {})
(20970, 20971, {})
(20971, 20972, {})
(20972, 20973, {})
(20973, 20974, {})
(20974, 20975, {})
(20975, 20976, {})
(20976, 20977, {})
(20977, 20978, {})
(20978, 20979, {})
(20979, 20980, {})
(20980, 20981, {})
(20981, 20982, {})
(20982, 20983, {})
(20983, 20984, {})
(20984, 20985, {})
(20985, 20986, {})
(20986, 20987, {})
(20987, 20988, {})
(20988, 20989, {})
(20989, 20990, {})
(20990, 20991, {})
(20991, 20992, {})
(20992, 20993, {})
(20993, 20994, {})
(20994, 20995, {})
(20995, 20996, {})
(20996, 20997, {})
(20997, 20998, {})
(20998, 20999, {})
(20999, 21000, {})
(21000, 21001, {})
(21001, 21002, {})
(21002, 21003, {})
(21003, 21004, {})
(21004, 21005, {})
(21005, 21006, {})
(21006, 21007, {})
(21007, 21008, {})
(21008, 21009, {})
(21009, 21010, {})
(21010, 21011, {})
(21011, 21012, {})
(21012, 21013, {})
(21013, 21014, {})
(21014, 21015, {})
(21015, 21016, {})
(21016, 21017, {})
(21017, 21018, {})
(21018, 21019, {})
(21019, 21020, {})
(21020, 21021, {})
(21021, 21022, {})
(21022, 21023, {})
(21023, 21024, {})
(21024, 21025, {})
(21025, 21026, {})
(21026, 21027, {})
(21027, 21028, {})
(21028, 21029, {})
(21029, 21030, {})
(21030, 21031, {})
(21031, 21032, {})
(21032, 21033, {})
(21033, 21034, {})
(21034, 21035, {})
(21035, 21036, {})
(21036, 21037, {})
(21037, 21038, {})
(21038, 21039, {})
(21039, 21040, {})
(21040, 21041, {})
(21041, 21042, {})
(21042, 21043, {})
(21043, 21044, {})
(21044, 21045, {})
(21045, 21046, {})
(21046, 21047, {})
(21047, 21048, {})
(21048, 21049, {})
(21049, 21050, {})
(21050, 21051, {})
(21051, 21052, {})
(21052, 21053, {})
(21053, 21054, {})
(21054, 21055, {})
(21055, 21056, {})
(21056, 21057, {})
(21057, 21058, {})
(21058, 21059, {})
(21059, 21060, {})
(21060, 21061, {})
(21061, 21062, {})
(21062, 21063, {})
(21063, 21064, {})
(21064, 21065, {})
(21065, 21066, {})
(21066, 21067, {})
(21067, 21068, {})
(21068, 21069, {})
(21069, 21070, {})
(21070, 21071, {})
(21071, 21072, {})
(21072, 21073, {})
(21073, 21074, {})
(21074, 21075, {})
(21075, 21076, {})
(21076, 21077, {})
(21077, 21078, {})
(21078, 21079, {})
(21079, 21080, {})
(21080, 21081, {})
(21081, 21082, {})
(21082, 21083, {})
(21083, 21084, {})
(21084, 21085, {})
(21085, 21086, {})
(21086, 21087, {})
(21087, 21088, {})
(21088, 21089, {})
(21089, 21090, {})
(21090, 21091, {})
(21091, 21092, {})
(21092, 21093, {})
(21093, 21094, {})
(21094, 21095, {})
(21095, 21096, {})
(21096, 21097, {})
(21097, 21098, {})
(21098, 21099, {})
(21099, 21100, {})
(21100, 21101, {})
(21101, 21102, {})
(21102, 21103, {})
(21103, 21104, {})
(21104, 21105, {})
(21105, 21106, {})
(21106, 21107, {})
(21107, 21108, {})
(21108, 21109, {})
(21109, 21110, {})
(21110, 21111, {})
(21111, 21112, {})
(21112, 21113, {})
(21113, 21114, {})
(21114, 21115, {})
(21115, 21116, {})
(21116, 21117, {})
(21117, 21118, {})
(21118, 21119, {})
(21119, 21120, {})
(21120, 21121, {})
(21121, 21122, {})
(21122, 21123, {})
(21123, 21124, {})
(21124, 21125, {})
(21125, 21126, {})
(21126, 21127, {})
(21127, 21128, {})
(21128, 21129, {})
(21129, 21130, {})
(21130, 21131, {})
(21131, 21132, {})
(21132, 21133, {})
(21133, 21134, {})
(21134, 21135, {})
(21135, 21136, {})
(21136, 21137, {})
(21137, 21138, {})
(21138, 21139, {})
(21139, 21140, {})
(21140, 21141, {})
(21141, 21142, {})
(21142, 21143, {})
(21143, 21144, {})
(21144, 21145, {})
(21145, 21146, {})
(21146, 21147, {})
(21147, 21148, {})
(21148, 21149, {})
(21149, 21150, {})
(21150, 21151, {})
(21151, 21152, {})
(21152, 21153, {})
(21153, 21154, {})
(21154, 21155, {})
(21155, 21156, {})
(21156, 21157, {})
(21157, 21158, {})
(21158, 21159, {})
(21159, 21160, {})
(21160, 21161, {})
(21161, 21162, {})
(21162, 21163, {})
(21163, 21164, {})
(21164, 21165, {})
(21165, 21166, {})
(21166, 21167, {})
(21167, 21168, {})
(21168, 21169, {})
(21169, 21170, {})
(21170, 21171, {})
(21171, 21172, {})
(21172, 21173, {})
(21173, 21174, {})
(21174, 21175, {})
(21175, 21176, {})
(21176, 21177, {})
(21177, 21178, {})
(21178, 21179, {})
(21179, 21180, {})
(21180, 21181, {})
(21181, 21182, {})
(21182, 21183, {})
(21183, 21184, {})
(21184, 21185, {})
(21185, 21186, {})
(21186, 21187, {})
(21187, 21188, {})
(21188, 21189, {})
(21189, 21190, {})
(21190, 21191, {})
(21191, 21192, {})
(21192, 21193, {})
(21193, 21194, {})
(21194, 21195, {})
(21195, 21196, {})
(21196, 21197, {})
(21197, 21198, {})
(21198, 21199, {})
(21199, 21200, {})
(21200, 21201, {})
(21201, 21202, {})
(21202, 21203, {})
(21203, 21204, {})
(21204, 21205, {})
(21205, 21206, {})
(21206, 21207, {})
(21207, 21208, {})
(21208, 21209, {})
(21209, 21210, {})
(21210, 21211, {})
(21211, 21212, {})
(21212, 21213, {})
(21213, 21214, {})
(21214, 21215, {})
(21215, 21216, {})
(21216, 21217, {})
(21217, 21218, {})
(21218, 21219, {})
(21219, 21220, {})
(21220, 21221, {})
(21221, 21222, {})
(21222, 21223, {})
(21223, 21224, {})
(21224, 21225, {})
(21225, 21226, {})
(21226, 21227, {})
(21227, 21228, {})
(21228, 21229, {})
(21229, 21230, {})
(21230, 21231, {})
(21231, 21232, {})
(21232, 21233, {})
(21233, 21234, {})
(21234, 21235, {})
(21235, 21236, {})
(21236, 21237, {})
(21237, 21238, {})
(21238, 21239, {})
(21239, 21240, {})
(21240, 21241, {})
(21241, 21242, {})
(21242, 21243, {})
(21243, 21244, {})
(21244, 21245, {})
(21245, 21246, {})
(21246, 21247, {})
(21247, 21248, {})
(21248, 21249, {})
(21249, 21250, {})
(21250, 21251, {})
(21251, 21252, {})
(21252, 21253, {})
(21253, 21254, {})
(21254, 21255, {})
(21255, 21256, {})
(21256, 21257, {})
(21257, 21258, {})
(21258, 21259, {})
(21259, 21260, {})
(21260, 21261, {})
(21261, 21262, {})
(21262, 21263, {})
(21263, 21264, {})
(21264, 21265, {})
(21265, 21266, {})
(21266, 21267, {})
(21267, 21268, {})
(21268, 21269, {})
(21269, 21270, {})
(21270, 21271, {})
(21271, 21272, {})
(21272, 21273, {})
(21273, 21274, {})
(21274, 21275, {})
(21275, 21276, {})
(21276, 21277, {})
(21277, 21278, {})
(21278, 21279, {})
(21279, 21280, {})
(21280, 21281, {})
(21281, 21282, {})
(21282, 21283, {})
(21283, 21284, {})
(21284, 21285, {})
(21285, 21286, {})
(21286, 21287, {})
(21287, 21288, {})
(21288, 21289, {})
(21289, 21290, {})
(21290, 21291, {})
(21291, 21292, {})
(21292, 21293, {})
(21293, 21294, {})
(21294, 21295, {})
(21295, 21296, {})
(21296, 21297, {})
(21297, 21298, {})
(21298, 21299, {})
(21299, 21300, {})
(21300, 21301, {})
(21301, 21302, {})
(21302, 21303, {})
(21303, 21304, {})
(21304, 21305, {})
(21305, 21306, {})
(21306, 21307, {})
(21307, 21308, {})
(21308, 21309, {})
(21309, 21310, {})
(21310, 21311, {})
(21311, 21312, {})
(21312, 21313, {})
(21313, 21314, {})
(21314, 21315, {})
(21315, 21316, {})
(21316, 21317, {})
(21317, 21318, {})
(21318, 21319, {})
(21319, 21320, {})
(21320, 21321, {})
(21321, 21322, {})
(21322, 21323, {})
(21323, 21324, {})
(21324, 21325, {})
(21325, 21326, {})
(21326, 21327, {})
(21327, 21328, {})
(21328, 21329, {})
(21329, 21330, {})
(21330, 21331, {})
(21331, 21332, {})
(21332, 21333, {})
(21333, 21334, {})
(21334, 21335, {})
(21335, 21336, {})
(21336, 21337, {})
(21337, 21338, {})
(21338, 21339, {})
(21339, 21340, {})
(21340, 21341, {})
(21341, 21342, {})
(21342, 21343, {})
(21343, 21344, {})
(21344, 21345, {})
(21345, 21346, {})
(21346, 21347, {})
(21347, 21348, {})
(21348, 21349, {})
(21349, 21350, {})
(21350, 21351, {})
(21351, 21352, {})
(21352, 21353, {})
(21353, 21354, {})
(21354, 21355, {})
(21355, 21356, {})
(21356, 21357, {})
(21357, 21358, {})
(21358, 21359, {})
(21359, 21360, {})
(21360, 21361, {})
(21361, 21362, {})
(21362, 21363, {})
(21363, 21364, {})
(21364, 21365, {})
(21365, 21366, {})
(21366, 21367, {})
(21367, 21368, {})
(21368, 21369, {})
(21369, 21370, {})
(21370, 21371, {})
(21371, 21372, {})
(21372, 21373, {})
(21373, 21374, {})
(21374, 21375, {})
(21375, 21376, {})
(21376, 21377, {})
(21377, 21378, {})
(21378, 21379, {})
(21379, 21380, {})
(21380, 21381, {})
(21381, 21382, {})
(21382, 21383, {})
(21383, 21384, {})
(21384, 21385, {})
(21385, 21386, {})
(21386, 21387, {})
(21387, 21388, {})
(21388, 21389, {})
(21389, 21390, {})
(21390, 21391, {})
(21391, 21392, {})
(21392, 21393, {})
(21393, 21394, {})
(21394, 21395, {})
(21395, 21396, {})
(21396, 21397, {})
(21397, 21398, {})
(21398, 21399, {})
(21399, 21400, {})
(21400, 21401, {})
(21401, 21402, {})
(21402, 21403, {})
(21403, 21404, {})
(21404, 21405, {})
(21405, 21406, {})
(21406, 21407, {})
(21407, 21408, {})
(21408, 21409, {})
(21409, 21410, {})
(21410, 21411, {})
(21411, 21412, {})
(21412, 21413, {})
(21413, 21414, {})
(21414, 21415, {})
(21415, 21416, {})
(21416, 21417, {})
(21417, 21418, {})
(21418, 21419, {})
(21419, 21420, {})
(21420, 21421, {})
(21421, 21422, {})
(21422, 21423, {})
(21423, 21424, {})
(21424, 21425, {})
(21425, 21426, {})
(21426, 21427, {})
(21427, 21428, {})
(21428, 21429, {})
(21429, 21430, {})
(21430, 21431, {})
(21431, 21432, {})
(21432, 21433, {})
(21433, 21434, {})
(21434, 21435, {})
(21435, 21436, {})
(21436, 21437, {})
(21437, 21438, {})
(21438, 21439, {})
(21439, 21440, {})
(21440, 21441, {})
(21441, 21442, {})
(21442, 21443, {})
(21443, 21444, {})
(21444, 21445, {})
(21445, 21446, {})
(21446, 21447, {})
(21447, 21448, {})
(21448, 21449, {})
(21449, 21450, {})
(21450, 21451, {})
(21451, 21452, {})
(21452, 21453, {})
(21453, 21454, {})
(21454, 21455, {})
(21455, 21456, {})
(21456, 21457, {})
(21457, 21458, {})
(21458, 21459, {})
(21459, 21460, {})
(21460, 21461, {})
(21461, 21462, {})
(21462, 21463, {})
(21463, 21464, {})
(21464, 21465, {})
(21465, 21466, {})
(21466, 21467, {})
(21467, 21468, {})
(21468, 21469, {})
(21469, 21470, {})
(21470, 21471, {})
(21471, 21472, {})
(21472, 21473, {})
(21473, 21474, {})
(21474, 21475, {})
(21475, 21476, {})
(21476, 21477, {})
(21477, 21478, {})
(21478, 21479, {})
(21479, 21480, {})
(21480, 21481, {})
(21481, 21482, {})
(21482, 21483, {})
(21483, 21484, {})
(21484, 21485, {})
(21485, 21486, {})
(21486, 21487, {})
(21487, 21488, {})
(21488, 21489, {})
(21489, 21490, {})
(21490, 21491, {})
(21491, 21492, {})
(21492, 21493, {})
(21493, 21494, {})
(21494, 21495, {})
(21495, 21496, {})
(21496, 21497, {})
(21497, 21498, {})
(21498, 21499, {})
(21499, 21500, {})
(21500, 21501, {})
(21501, 21502, {})
(21502, 21503, {})
(21503, 21504, {})
(21504, 21505, {})
(21505, 21506, {})
(21506, 21507, {})
(21507, 21508, {})
(21508, 21509, {})
(21509, 21510, {})
(21510, 21511, {})
(21511, 21512, {})
(21512, 21513, {})
(21513, 21514, {})
(21514, 21515, {})
(21515, 21516, {})
(21516, 21517, {})
(21517, 21518, {})
(21518, 21519, {})
(21519, 21520, {})
(21520, 21521, {})
(21521, 21522, {})
(21522, 21523, {})
(21523, 21524, {})
(21524, 21525, {})
(21525, 21526, {})
(21526, 21527, {})
(21527, 21528, {})
(21528, 21529, {})
(21529, 21530, {})
(21530, 21531, {})
(21531, 21532, {})
(21532, 21533, {})
(21533, 21534, {})
(21534, 21535, {})
(21535, 21536, {})
(21536, 21537, {})
(21537, 21538, {})
(21538, 21539, {})
(21539, 21540, {})
(21540, 21541, {})
(21541, 21542, {})
(21542, 21543, {})
(21543, 21544, {})
(21544, 21545, {})
(21545, 21546, {})
(21546, 21547, {})
(21547, 21548, {})
(21548, 21549, {})
(21549, 21550, {})
(21550, 21551, {})
(21551, 21552, {})
(21552, 21553, {})
(21553, 21554, {})
(21554, 21555, {})
(21555, 21556, {})
(21556, 21557, {})
(21557, 21558, {})
(21558, 21559, {})
(21559, 21560, {})
(21560, 21561, {})
(21561, 21562, {})
(21562, 21563, {})
(21563, 21564, {})
(21564, 21565, {})
(21565, 21566, {})
(21566, 21567, {})
(21567, 21568, {})
(21568, 21569, {})
(21569, 21570, {})
(21570, 21571, {})
(21571, 21572, {})
(21572, 21573, {})
(21573, 21574, {})
(21574, 21575, {})
(21575, 21576, {})
(21576, 21577, {})
(21577, 21578, {})
(21578, 21579, {})
(21579, 21580, {})
(21580, 21581, {})
(21581, 21582, {})
(21582, 21583, {})
(21583, 21584, {})
(21584, 21585, {})
(21585, 21586, {})
(21586, 21587, {})
(21587, 21588, {})
(21588, 21589, {})
(21589, 21590, {})
(21590, 21591, {})
(21591, 21592, {})
(21592, 21593, {})
(21593, 21594, {})
(21594, 21595, {})
(21595, 21596, {})
(21596, 21597, {})
(21597, 21598, {})
(21598, 21599, {})
(21599, 21600, {})
(21600, 21601, {})
(21601, 21602, {})
(21602, 21603, {})
(21603, 21604, {})
(21604, 21605, {})
(21605, 21606, {})
(21606, 21607, {})
(21607, 21608, {})
(21608, 21609, {})
(21609, 21610, {})
(21610, 21611, {})
(21611, 21612, {})
(21612, 21613, {})
(21613, 21614, {})
(21614, 21615, {})
(21615, 21616, {})
(21616, 21617, {})
(21617, 21618, {})
(21618, 21619, {})
(21619, 21620, {})
(21620, 21621, {})
(21621, 21622, {})
(21622, 21623, {})
(21623, 21624, {})
(21624, 21625, {})
(21625, 21626, {})
(21626, 21627, {})
(21627, 21628, {})
(21628, 21629, {})
(21629, 21630, {})
(21630, 21631, {})
(21631, 21632, {})
(21632, 21633, {})
(21633, 21634, {})
(21634, 21635, {})
(21635, 21636, {})
(21636, 21637, {})
(21637, 21638, {})
(21638, 21639, {})
(21639, 21640, {})
(21640, 21641, {})
(21641, 21642, {})
(21642, 21643, {})
(21643, 21644, {})
(21644, 21645, {})
(21645, 21646, {})
(21646, 21647, {})
(21647, 21648, {})
(21648, 21649, {})
(21649, 21650, {})
(21650, 21651, {})
(21651, 21652, {})
(21652, 21653, {})
(21653, 21654, {})
(21654, 21655, {})
(21655, 21656, {})
(21656, 21657, {})
(21657, 21658, {})
(21658, 21659, {})
(21659, 21660, {})
(21660, 21661, {})
(21661, 21662, {})
(21662, 21663, {})
(21663, 21664, {})
(21664, 21665, {})
(21665, 21666, {})
(21666, 21667, {})
(21667, 21668, {})
(21668, 21669, {})
(21669, 21670, {})
(21670, 21671, {})
(21671, 21672, {})
(21672, 21673, {})
(21673, 21674, {})
(21674, 21675, {})
(21675, 21676, {})
(21676, 21677, {})
(21677, 21678, {})
(21678, 21679, {})
(21679, 21680, {})
(21680, 21681, {})
(21681, 21682, {})
(21682, 21683, {})
(21683, 21684, {})
(21684, 21685, {})
(21685, 21686, {})
(21686, 21687, {})
(21687, 21688, {})
(21688, 21689, {})
(21689, 21690, {})
(21690, 21691, {})
(21691, 21692, {})
(21692, 21693, {})
(21693, 21694, {})
(21694, 21695, {})
(21695, 21696, {})
(21696, 21697, {})
(21697, 21698, {})
(21698, 21699, {})
(21699, 21700, {})
(21700, 21701, {})
(21701, 21702, {})
(21702, 21703, {})
(21703, 21704, {})
(21704, 21705, {})
(21705, 21706, {})
(21706, 21707, {})
(21707, 21708, {})
(21708, 21709, {})
(21709, 21710, {})
(21710, 21711, {})
(21711, 21712, {})
(21712, 21713, {})
(21713, 21714, {})
(21714, 21715, {})
(21715, 21716, {})
(21716, 21717, {})
(21717, 21718, {})
(21718, 21719, {})
(21719, 21720, {})
(21720, 21721, {})
(21721, 21722, {})
(21722, 21723, {})
(21723, 21724, {})
(21724, 21725, {})
(21725, 21726, {})
(21726, 21727, {})
(21727, 21728, {})
(21728, 21729, {})
(21729, 21730, {})
(21730, 21731, {})
(21731, 21732, {})
(21732, 21733, {})
(21733, 21734, {})
(21734, 21735, {})
(21735, 21736, {})
(21736, 21737, {})
(21737, 21738, {})
(21738, 21739, {})
(21739, 21740, {})
(21740, 21741, {})
(21741, 21742, {})
(21742, 21743, {})
(21743, 21744, {})
(21744, 21745, {})
(21745, 21746, {})
(21746, 21747, {})
(21747, 21748, {})
(21748, 21749, {})
(21749, 21750, {})
(21750, 21751, {})
(21751, 21752, {})
(21752, 21753, {})
(21753, 21754, {})
(21754, 21755, {})
(21755, 21756, {})
(21756, 21757, {})
(21757, 21758, {})
(21758, 21759, {})
(21759, 21760, {})
(21760, 21761, {})
(21761, 21762, {})
(21762, 21763, {})
(21763, 21764, {})
(21764, 21765, {})
(21765, 21766, {})
(21766, 21767, {})
(21767, 21768, {})
(21768, 21769, {})
(21769, 21770, {})
(21770, 21771, {})
(21771, 21772, {})
(21772, 21773, {})
(21773, 21774, {})
(21774, 21775, {})
(21775, 21776, {})
(21776, 21777, {})
(21777, 21778, {})
(21778, 21779, {})
(21779, 21780, {})
(21780, 21781, {})
(21781, 21782, {})
(21782, 21783, {})
(21783, 21784, {})
(21784, 21785, {})
(21785, 21786, {})
(21786, 21787, {})
(21787, 21788, {})
(21788, 21789, {})
(21789, 21790, {})
(21790, 21791, {})
(21791, 21792, {})
(21792, 21793, {})
(21793, 21794, {})
(21794, 21795, {})
(21795, 21796, {})
(21796, 21797, {})
(21797, 21798, {})
(21798, 21799, {})
(21799, 21800, {})
(21800, 21801, {})
(21801, 21802, {})
(21802, 21803, {})
(21803, 21804, {})
(21804, 21805, {})
(21805, 21806, {})
(21806, 21807, {})
(21807, 21808, {})
(21808, 21809, {})
(21809, 21810, {})
(21810, 21811, {})
(21811, 21812, {})
(21812, 21813, {})
(21813, 21814, {})
(21814, 21815, {})
(21815, 21816, {})
(21816, 21817, {})
(21817, 21818, {})
(21818, 21819, {})
(21819, 21820, {})
(21820, 21821, {})
(21821, 21822, {})
(21822, 21823, {})
(21823, 21824, {})
(21824, 21825, {})
(21825, 21826, {})
(21826, 21827, {})
(21827, 21828, {})
(21828, 21829, {})
(21829, 21830, {})
(21830, 21831, {})
(21831, 21832, {})
(21832, 21833, {})
(21833, 21834, {})
(21834, 21835, {})
(21835, 21836, {})
(21836, 21837, {})
(21837, 21838, {})
(21838, 21839, {})
(21839, 21840, {})
(21840, 21841, {})
(21841, 21842, {})
(21842, 21843, {})
(21843, 21844, {})
(21844, 21845, {})
(21845, 21846, {})
(21846, 21847, {})
(21847, 21848, {})
(21848, 21849, {})
(21849, 21850, {})
(21850, 21851, {})
(21851, 21852, {})
(21852, 21853, {})
(21853, 21854, {})
(21854, 21855, {})
(21855, 21856, {})
(21856, 21857, {})
(21857, 21858, {})
(21858, 21859, {})
(21859, 21860, {})
(21860, 21861, {})
(21861, 21862, {})
(21862, 21863, {})
(21863, 21864, {})
(21864, 21865, {})
(21865, 21866, {})
(21866, 21867, {})
(21867, 21868, {})
(21868, 21869, {})
(21869, 21870, {})
(21870, 21871, {})
(21871, 21872, {})
(21872, 21873, {})
(21873, 21874, {})
(21874, 21875, {})
(21875, 21876, {})
(21876, 21877, {})
(21877, 21878, {})
(21878, 21879, {})
(21879, 21880, {})
(21880, 21881, {})
(21881, 21882, {})
(21882, 21883, {})
(21883, 21884, {})
(21884, 21885, {})
(21885, 21886, {})
(21886, 21887, {})
(21887, 21888, {})
(21888, 21889, {})
(21889, 21890, {})
(21890, 21891, {})
(21891, 21892, {})
(21892, 21893, {})
(21893, 21894, {})
(21894, 21895, {})
(21895, 21896, {})
(21896, 21897, {})
(21897, 21898, {})
(21898, 21899, {})
(21899, 21900, {})
(21900, 21901, {})
(21901, 21902, {})
(21902, 21903, {})
(21903, 21904, {})
(21904, 21905, {})
(21905, 21906, {})
(21906, 21907, {})
(21907, 21908, {})
(21908, 21909, {})
(21909, 21910, {})
(21910, 21911, {})
(21911, 21912, {})
(21912, 21913, {})
(21913, 21914, {})
(21914, 21915, {})
(21915, 21916, {})
(21916, 21917, {})
(21917, 21918, {})
(21918, 21919, {})
(21919, 21920, {})
(21920, 21921, {})
(21921, 21922, {})
(21922, 21923, {})
(21923, 21924, {})
(21924, 21925, {})
(21925, 21926, {})
(21926, 21927, {})
(21927, 21928, {})
(21928, 21929, {})
(21929, 21930, {})
(21930, 21931, {})
(21931, 21932, {})
(21932, 21933, {})
(21933, 21934, {})
(21934, 21935, {})
(21935, 21936, {})
(21936, 21937, {})
(21937, 21938, {})
(21938, 21939, {})
(21939, 21940, {})
(21940, 21941, {})
(21941, 21942, {})
(21942, 21943, {})
(21943, 21944, {})
(21944, 21945, {})
(21945, 21946, {})
(21946, 21947, {})
(21947, 21948, {})
(21948, 21949, {})
(21949, 21950, {})
(21950, 21951, {})
(21951, 21952, {})
(21952, 21953, {})
(21953, 21954, {})
(21954, 21955, {})
(21955, 21956, {})
(21956, 21957, {})
(21957, 21958, {})
(21958, 21959, {})
(21959, 21960, {})
(21960, 21961, {})
(21961, 21962, {})
(21962, 21963, {})
(21963, 21964, {})
(21964, 21965, {})
(21965, 21966, {})
(21966, 21967, {})
(21967, 21968, {})
(21968, 21969, {})
(21969, 21970, {})
(21970, 21971, {})
(21971, 21972, {})
(21972, 21973, {})
(21973, 21974, {})
(21974, 21975, {})
(21975, 21976, {})
(21976, 21977, {})
(21977, 21978, {})
(21978, 21979, {})
(21979, 21980, {})
(21980, 21981, {})
(21981, 21982, {})
(21982, 21983, {})
(21983, 21984, {})
(21984, 21985, {})
(21985, 21986, {})
(21986, 21987, {})
(21987, 21988, {})
(21988, 21989, {})
(21989, 21990, {})
(21990, 21991, {})
(21991, 21992, {})
(21992, 21993, {})
(21993, 21994, {})
(21994, 21995, {})
(21995, 21996, {})
(21996, 21997, {})
(21997, 21998, {})
(21998, 21999, {})
(21999, 22000, {})
(22000, 22001, {})
(22001, 22002, {})
(22002, 22003, {})
(22003, 22004, {})
(22004, 22005, {})
(22005, 22006, {})
(22006, 22007, {})
(22007, 22008, {})
(22008, 22009, {})
(22009, 22010, {})
(22010, 22011, {})
(22011, 22012, {})
(22012, 22013, {})
(22013, 22014, {})
(22014, 22015, {})
(22015, 22016, {})
(22016, 22017, {})
(22017, 22018, {})
(22018, 22019, {})
(22019, 22020, {})
(22020, 22021, {})
(22021, 22022, {})
(22022, 22023, {})
(22023, 22024, {})
(22024, 22025, {})
(22025, 22026, {})
(22026, 22027, {})
(22027, 22028, {})
(22028, 22029, {})
(22029, 22030, {})
(22030, 22031, {})
(22031, 22032, {})
(22032, 22033, {})
(22033, 22034, {})
(22034, 22035, {})
(22035, 22036, {})
(22036, 22037, {})
(22037, 22038, {})
(22038, 22039, {})
(22039, 22040, {})
(22040, 22041, {})
(22041, 22042, {})
(22042, 22043, {})
(22043, 22044, {})
(22044, 22045, {})
(22045, 22046, {})
(22046, 22047, {})
(22047, 22048, {})
(22048, 22049, {})
(22049, 22050, {})
(22050, 22051, {})
(22051, 22052, {})
(22052, 22053, {})
(22053, 22054, {})
(22054, 22055, {})
(22055, 22056, {})
(22056, 22057, {})
(22057, 22058, {})
(22058, 22059, {})
(22059, 22060, {})
(22060, 22061, {})
(22061, 22062, {})
(22062, 22063, {})
(22063, 22064, {})
(22064, 22065, {})
(22065, 22066, {})
(22066, 22067, {})
(22067, 22068, {})
(22068, 22069, {})
(22069, 22070, {})
(22070, 22071, {})
(22071, 22072, {})
(22072, 22073, {})
(22073, 22074, {})
(22074, 22075, {})
(22075, 22076, {})
(22076, 22077, {})
(22077, 22078, {})
(22078, 22079, {})
(22079, 22080, {})
(22080, 22081, {})
(22081, 22082, {})
(22082, 22083, {})
(22083, 22084, {})
(22084, 22085, {})
(22085, 22086, {})
(22086, 22087, {})
(22087, 22088, {})
(22088, 22089, {})
(22089, 22090, {})
(22090, 22091, {})
(22091, 22092, {})
(22092, 22093, {})
(22093, 22094, {})
(22094, 22095, {})
(22095, 22096, {})
(22096, 22097, {})
(22097, 22098, {})
(22098, 22099, {})
(22099, 22100, {})
(22100, 22101, {})
(22101, 22102, {})
(22102, 22103, {})
(22103, 22104, {})
(22104, 22105, {})
(22105, 22106, {})
(22106, 22107, {})
(22107, 22108, {})
(22108, 22109, {})
(22109, 22110, {})
(22110, 22111, {})
(22111, 22112, {})
(22112, 22113, {})
(22113, 22114, {})
(22114, 22115, {})
(22115, 22116, {})
(22116, 22117, {})
(22117, 22118, {})
(22118, 22119, {})
(22119, 22120, {})
(22120, 22121, {})
(22121, 22122, {})
(22122, 22123, {})
(22123, 22124, {})
(22124, 22125, {})
(22125, 22126, {})
(22126, 22127, {})
(22127, 22128, {})
(22128, 22129, {})
(22129, 22130, {})
(22130, 22131, {})
(22131, 22132, {})
(22132, 22133, {})
(22133, 22134, {})
(22134, 22135, {})
(22135, 22136, {})
(22136, 22137, {})
(22137, 22138, {})
(22138, 22139, {})
(22139, 22140, {})
(22140, 22141, {})
(22141, 22142, {})
(22142, 22143, {})
(22143, 22144, {})
(22144, 22145, {})
(22145, 22146, {})
(22146, 22147, {})
(22147, 22148, {})
(22148, 22149, {})
(22149, 22150, {})
(22150, 22151, {})
(22151, 22152, {})
(22152, 22153, {})
(22153, 22154, {})
(22154, 22155, {})
(22155, 22156, {})
(22156, 22157, {})
(22157, 22158, {})
(22158, 22159, {})
(22159, 22160, {})
(22160, 22161, {})
(22161, 22162, {})
(22162, 22163, {})
(22163, 22164, {})
(22164, 22165, {})
(22165, 22166, {})
(22166, 22167, {})
(22167, 22168, {})
(22168, 22169, {})
(22169, 22170, {})
(22170, 22171, {})
(22171, 22172, {})
(22172, 22173, {})
(22173, 22174, {})
(22174, 22175, {})
(22175, 22176, {})
(22176, 22177, {})
(22177, 22178, {})
(22178, 22179, {})
(22179, 22180, {})
(22180, 22181, {})
(22181, 22182, {})
(22182, 22183, {})
(22183, 22184, {})
(22184, 22185, {})
(22185, 22186, {})
(22186, 22187, {})
(22187, 22188, {})
(22188, 22189, {})
(22189, 22190, {})
(22190, 22191, {})
(22191, 22192, {})
(22192, 22193, {})
(22193, 22194, {})
(22194, 22195, {})
(22195, 22196, {})
(22196, 22197, {})
(22197, 22198, {})
(22198, 22199, {})
(22199, 22200, {})
(22200, 22201, {})
(22201, 22202, {})
(22202, 22203, {})
(22203, 22204, {})
(22204, 22205, {})
(22205, 22206, {})
(22206, 22207, {})
(22207, 22208, {})
(22208, 22209, {})
(22209, 22210, {})
(22210, 22211, {})
(22211, 22212, {})
(22212, 22213, {})
(22213, 22214, {})
(22214, 22215, {})
(22215, 22216, {})
(22216, 22217, {})
(22217, 22218, {})
(22218, 22219, {})
(22219, 22220, {})
(22220, 22221, {})
(22221, 22222, {})
(22222, 22223, {})
(22223, 22224, {})
(22224, 22225, {})
(22225, 22226, {})
(22226, 22227, {})
(22227, 22228, {})
(22228, 22229, {})
(22229, 22230, {})
(22230, 22231, {})
(22231, 22232, {})
(22232, 22233, {})
(22233, 22234, {})
(22234, 22235, {})
(22235, 22236, {})
(22236, 22237, {})
(22237, 22238, {})
(22238, 22239, {})
(22239, 22240, {})
(22240, 22241, {})
(22241, 22242, {})
(22242, 22243, {})
(22243, 22244, {})
(22244, 22245, {})
(22245, 22246, {})
(22246, 22247, {})
(22247, 22248, {})
(22248, 22249, {})
(22249, 22250, {})
(22250, 22251, {})
(22251, 22252, {})
(22252, 22253, {})
(22253, 22254, {})
(22254, 22255, {})
(22255, 22256, {})
(22256, 22257, {})
(22257, 22258, {})
(22258, 22259, {})
(22259, 22260, {})
(22260, 22261, {})
(22261, 22262, {})
(22262, 22263, {})
(22263, 22264, {})
(22264, 22265, {})
(22265, 22266, {})
(22266, 22267, {})
(22267, 22268, {})
(22268, 22269, {})
(22269, 22270, {})
(22270, 22271, {})
(22271, 22272, {})
(22272, 22273, {})
(22273, 22274, {})
(22274, 22275, {})
(22275, 22276, {})
(22276, 22277, {})
(22277, 22278, {})
(22278, 22279, {})
(22279, 22280, {})
(22280, 22281, {})
(22281, 22282, {})
(22282, 22283, {})
(22283, 22284, {})
(22284, 22285, {})
(22285, 22286, {})
(22286, 22287, {})
(22287, 22288, {})
(22288, 22289, {})
(22289, 22290, {})
(22290, 22291, {})
(22291, 22292, {})
(22292, 22293, {})
(22293, 22294, {})
(22294, 22295, {})
(22295, 22296, {})
(22296, 22297, {})
(22297, 22298, {})
(22298, 22299, {})
(22299, 22300, {})
(22300, 22301, {})
(22301, 22302, {})
(22302, 22303, {})
(22303, 22304, {})
(22304, 22305, {})
(22305, 22306, {})
(22306, 22307, {})
(22307, 22308, {})
(22308, 22309, {})
(22309, 22310, {})
(22310, 22311, {})
(22311, 22312, {})
(22312, 22313, {})
(22313, 22314, {})
(22314, 22315, {})
(22315, 22316, {})
(22316, 22317, {})
(22317, 22318, {})
(22318, 22319, {})
(22319, 22320, {})
(22320, 22321, {})
(22321, 22322, {})
(22322, 22323, {})
(22323, 22324, {})
(22324, 22325, {})
(22325, 22326, {})
(22326, 22327, {})
(22327, 22328, {})
(22328, 22329, {})
(22329, 22330, {})
(22330, 22331, {})
(22331, 22332, {})
(22332, 22333, {})
(22333, 22334, {})
(22334, 22335, {})
(22335, 22336, {})
(22336, 22337, {})
(22337, 22338, {})
(22338, 22339, {})
(22339, 22340, {})
(22340, 22341, {})
(22341, 22342, {})
(22342, 22343, {})
(22343, 22344, {})
(22344, 22345, {})
(22345, 22346, {})
(22346, 22347, {})
(22347, 22348, {})
(22348, 22349, {})
(22349, 22350, {})
(22350, 22351, {})
(22351, 22352, {})
(22352, 22353, {})
(22353, 22354, {})
(22354, 22355, {})
(22355, 22356, {})
(22356, 22357, {})
(22357, 22358, {})
(22358, 22359, {})
(22359, 22360, {})
(22360, 22361, {})
(22361, 22362, {})
(22362, 22363, {})
(22363, 22364, {})
(22364, 22365, {})
(22365, 22366, {})
(22366, 22367, {})
(22367, 22368, {})
(22368, 22369, {})
(22369, 22370, {})
(22370, 22371, {})
(22371, 22372, {})
(22372, 22373, {})
(22373, 22374, {})
(22374, 22375, {})
(22375, 22376, {})
(22376, 22377, {})
(22377, 22378, {})
(22378, 22379, {})
(22379, 22380, {})
(22380, 22381, {})
(22381, 22382, {})
(22382, 22383, {})
(22383, 22384, {})
(22384, 22385, {})
(22385, 22386, {})
(22386, 22387, {})
(22387, 22388, {})
(22388, 22389, {})
(22389, 22390, {})
(22390, 22391, {})
(22391, 22392, {})
(22392, 22393, {})
(22393, 22394, {})
(22394, 22395, {})
(22395, 22396, {})
(22396, 22397, {})
(22397, 22398, {})
(22398, 22399, {})
(22399, 22400, {})
(22400, 22401, {})
(22401, 22402, {})
(22402, 22403, {})
(22403, 22404, {})
(22404, 22405, {})
(22405, 22406, {})
(22406, 22407, {})
(22407, 22408, {})
(22408, 22409, {})
(22409, 22410, {})
(22410, 22411, {})
(22411, 22412, {})
(22412, 22413, {})
(22413, 22414, {})
(22414, 22415, {})
(22415, 22416, {})
(22416, 22417, {})
(22417, 22418, {})
(22418, 22419, {})
(22419, 22420, {})
(22420, 22421, {})
(22421, 22422, {})
(22422, 22423, {})
(22423, 22424, {})
(22424, 22425, {})
(22425, 22426, {})
(22426, 22427, {})
(22427, 22428, {})
(22428, 22429, {})
(22429, 22430, {})
(22430, 22431, {})
(22431, 22432, {})
(22432, 22433, {})
(22433, 22434, {})
(22434, 22435, {})
(22435, 22436, {})
(22436, 22437, {})
(22437, 22438, {})
(22438, 22439, {})
(22439, 22440, {})
(22440, 22441, {})
(22441, 22442, {})
(22442, 22443, {})
(22443, 22444, {})
(22444, 22445, {})
(22445, 22446, {})
(22446, 22447, {})
(22447, 22448, {})
(22448, 22449, {})
(22449, 22450, {})
(22450, 22451, {})
(22451, 22452, {})
(22452, 22453, {})
(22453, 22454, {})
(22454, 22455, {})
(22455, 22456, {})
(22456, 22457, {})
(22457, 22458, {})
(22458, 22459, {})
(22459, 22460, {})
(22460, 22461, {})
(22461, 22462, {})
(22462, 22463, {})
(22463, 22464, {})
(22464, 22465, {})
(22465, 22466, {})
(22466, 22467, {})
(22467, 22468, {})
(22468, 22469, {})
(22469, 22470, {})
(22470, 22471, {})
(22471, 22472, {})
(22472, 22473, {})
(22473, 22474, {})
(22474, 22475, {})
(22475, 22476, {})
(22476, 22477, {})
(22477, 22478, {})
(22478, 22479, {})
(22479, 22480, {})
(22480, 22481, {})
(22481, 22482, {})
(22482, 22483, {})
(22483, 22484, {})
(22484, 22485, {})
(22485, 22486, {})
(22486, 22487, {})
(22487, 22488, {})
(22488, 22489, {})
(22489, 22490, {})
(22490, 22491, {})
(22491, 22492, {})
(22492, 22493, {})
(22493, 22494, {})
(22494, 22495, {})
(22495, 22496, {})
(22496, 22497, {})
(22497, 22498, {})
(22498, 22499, {})
(22499, 22500, {})
(22500, 22501, {})
(22501, 22502, {})
(22502, 22503, {})
(22503, 22504, {})
(22504, 22505, {})
(22505, 22506, {})
(22506, 22507, {})
(22507, 22508, {})
(22508, 22509, {})
(22509, 22510, {})
(22510, 22511, {})
(22511, 22512, {})
(22512, 22513, {})
(22513, 22514, {})
(22514, 22515, {})
(22515, 22516, {})
(22516, 22517, {})
(22517, 22518, {})
(22518, 22519, {})
(22519, 22520, {})
(22520, 22521, {})
(22521, 22522, {})
(22522, 22523, {})
(22523, 22524, {})
(22524, 22525, {})
(22525, 22526, {})
(22526, 22527, {})
(22527, 22528, {})
(22528, 22529, {})
(22529, 22530, {})
(22530, 22531, {})
(22531, 22532, {})
(22532, 22533, {})
(22533, 22534, {})
(22534, 22535, {})
(22535, 22536, {})
(22536, 22537, {})
(22537, 22538, {})
(22538, 22539, {})
(22539, 22540, {})
(22540, 22541, {})
(22541, 22542, {})
(22542, 22543, {})
(22543, 22544, {})
(22544, 22545, {})
(22545, 22546, {})
(22546, 22547, {})
(22547, 22548, {})
(22548, 22549, {})
(22549, 22550, {})
(22550, 22551, {})
(22551, 22552, {})
(22552, 22553, {})
(22553, 22554, {})
(22554, 22555, {})
(22555, 22556, {})
(22556, 22557, {})
(22557, 22558, {})
(22558, 22559, {})
(22559, 22560, {})
(22560, 22561, {})
(22561, 22562, {})
(22562, 22563, {})
(22563, 22564, {})
(22564, 22565, {})
(22565, 22566, {})
(22566, 22567, {})
(22567, 22568, {})
(22568, 22569, {})
(22569, 22570, {})
(22570, 22571, {})
(22571, 22572, {})
(22572, 22573, {})
(22573, 22574, {})
(22574, 22575, {})
(22575, 22576, {})
(22576, 22577, {})
(22577, 22578, {})
(22578, 22579, {})
(22579, 22580, {})
(22580, 22581, {})
(22581, 22582, {})
(22582, 22583, {})
(22583, 22584, {})
(22584, 22585, {})
(22585, 22586, {})
(22586, 22587, {})
(22587, 22588, {})
(22588, 22589, {})
(22589, 22590, {})
(22590, 22591, {})
(22591, 22592, {})
(22592, 22593, {})
(22593, 22594, {})
(22594, 22595, {})
(22595, 22596, {})
(22596, 22597, {})
(22597, 22598, {})
(22598, 22599, {})
(22599, 22600, {})
(22600, 22601, {})
(22601, 22602, {})
(22602, 22603, {})
(22603, 22604, {})
(22604, 22605, {})
(22605, 22606, {})
(22606, 22607, {})
(22607, 22608, {})
(22608, 22609, {})
(22609, 22610, {})
(22610, 22611, {})
(22611, 22612, {})
(22612, 22613, {})
(22613, 22614, {})
(22614, 22615, {})
(22615, 22616, {})
(22616, 22617, {})
(22617, 22618, {})
(22618, 22619, {})
(22619, 22620, {})
(22620, 22621, {})
(22621, 22622, {})
(22622, 22623, {})
(22623, 22624, {})
(22624, 22625, {})
(22625, 22626, {})
(22626, 22627, {})
(22627, 22628, {})
(22628, 22629, {})
(22629, 22630, {})
(22630, 22631, {})
(22631, 22632, {})
(22632, 22633, {})
(22633, 22634, {})
(22634, 22635, {})
(22635, 22636, {})
(22636, 22637, {})
(22637, 22638, {})
(22638, 22639, {})
(22639, 22640, {})
(22640, 22641, {})
(22641, 22642, {})
(22642, 22643, {})
(22643, 22644, {})
(22644, 22645, {})
(22645, 22646, {})
(22646, 22647, {})
(22647, 22648, {})
(22648, 22649, {})
(22649, 22650, {})
(22650, 22651, {})
(22651, 22652, {})
(22652, 22653, {})
(22653, 22654, {})
(22654, 22655, {})
(22655, 22656, {})
(22656, 22657, {})
(22657, 22658, {})
(22658, 22659, {})
(22659, 22660, {})
(22660, 22661, {})
(22661, 22662, {})
(22662, 22663, {})
(22663, 22664, {})
(22664, 22665, {})
(22665, 22666, {})
(22666, 22667, {})
(22667, 22668, {})
(22668, 22669, {})
(22669, 22670, {})
(22670, 22671, {})
(22671, 22672, {})
(22672, 22673, {})
(22673, 22674, {})
(22674, 22675, {})
(22675, 22676, {})
(22676, 22677, {})
(22677, 22678, {})
(22678, 22679, {})
(22679, 22680, {})
(22680, 22681, {})
(22681, 22682, {})
(22682, 22683, {})
(22683, 22684, {})
(22684, 22685, {})
(22685, 22686, {})
(22686, 22687, {})
(22687, 22688, {})
(22688, 22689, {})
(22689, 22690, {})
(22690, 22691, {})
(22691, 22692, {})
(22692, 22693, {})
(22693, 22694, {})
(22694, 22695, {})
(22695, 22696, {})
(22696, 22697, {})
(22697, 22698, {})
(22698, 22699, {})
(22699, 22700, {})
(22700, 22701, {})
(22701, 22702, {})
(22702, 22703, {})
(22703, 22704, {})
(22704, 22705, {})
(22705, 22706, {})
(22706, 22707, {})
(22707, 22708, {})
(22708, 22709, {})
(22709, 22710, {})
(22710, 22711, {})
(22711, 22712, {})
(22712, 22713, {})
(22713, 22714, {})
(22714, 22715, {})
(22715, 22716, {})
(22716, 22717, {})
(22717, 22718, {})
(22718, 22719, {})
(22719, 22720, {})
(22720, 22721, {})
(22721, 22722, {})
(22722, 22723, {})
(22723, 22724, {})
(22724, 22725, {})
(22725, 22726, {})
(22726, 22727, {})
(22727, 22728, {})
(22728, 22729, {})
(22729, 22730, {})
(22730, 22731, {})
(22731, 22732, {})
(22732, 22733, {})
(22733, 22734, {})
(22734, 22735, {})
(22735, 22736, {})
(22736, 22737, {})
(22737, 22738, {})
(22738, 22739, {})
(22739, 22740, {})
(22740, 22741, {})
(22741, 22742, {})
(22742, 22743, {})
(22743, 22744, {})
(22744, 22745, {})
(22745, 22746, {})
(22746, 22747, {})
(22747, 22748, {})
(22748, 22749, {})
(22749, 22750, {})
(22750, 22751, {})
(22751, 22752, {})
(22752, 22753, {})
(22753, 22754, {})
(22754, 22755, {})
(22755, 22756, {})
(22756, 22757, {})
(22757, 22758, {})
(22758, 22759, {})
(22759, 22760, {})
(22760, 22761, {})
(22761, 22762, {})
(22762, 22763, {})
(22763, 22764, {})
(22764, 22765, {})
(22765, 22766, {})
(22766, 22767, {})
(22767, 22768, {})
(22768, 22769, {})
(22769, 22770, {})
(22770, 22771, {})
(22771, 22772, {})
(22772, 22773, {})
(22773, 22774, {})
(22774, 22775, {})
(22775, 22776, {})
(22776, 22777, {})
(22777, 22778, {})
(22778, 22779, {})
(22779, 22780, {})
(22780, 22781, {})
(22781, 22782, {})
(22782, 22783, {})
(22783, 22784, {})
(22784, 22785, {})
(22785, 22786, {})
(22786, 22787, {})
(22787, 22788, {})
(22788, 22789, {})
(22789, 22790, {})
(22790, 22791, {})
(22791, 22792, {})
(22792, 22793, {})
(22793, 22794, {})
(22794, 22795, {})
(22795, 22796, {})
(22796, 22797, {})
(22797, 22798, {})
(22798, 22799, {})
(22799, 22800, {})
(22800, 22801, {})
(22801, 22802, {})
(22802, 22803, {})
(22803, 22804, {})
(22804, 22805, {})
(22805, 22806, {})
(22806, 22807, {})
(22807, 22808, {})
(22808, 22809, {})
(22809, 22810, {})
(22810, 22811, {})
(22811, 22812, {})
(22812, 22813, {})
(22813, 22814, {})
(22814, 22815, {})
(22815, 22816, {})
(22816, 22817, {})
(22817, 22818, {})
(22818, 22819, {})
(22819, 22820, {})
(22820, 22821, {})
(22821, 22822, {})
(22822, 22823, {})
(22823, 22824, {})
(22824, 22825, {})
(22825, 22826, {})
(22826, 22827, {})
(22827, 22828, {})
(22828, 22829, {})
(22829, 22830, {})
(22830, 22831, {})
(22831, 22832, {})
(22832, 22833, {})
(22833, 22834, {})
(22834, 22835, {})
(22835, 22836, {})
(22836, 22837, {})
(22837, 22838, {})
(22838, 22839, {})
(22839, 22840, {})
(22840, 22841, {})
(22841, 22842, {})
(22842, 22843, {})
(22843, 22844, {})
(22844, 22845, {})
(22845, 22846, {})
(22846, 22847, {})
(22847, 22848, {})
(22848, 22849, {})
(22849, 22850, {})
(22850, 22851, {})
(22851, 22852, {})
(22852, 22853, {})
(22853, 22854, {})
(22854, 22855, {})
(22855, 22856, {})
(22856, 22857, {})
(22857, 22858, {})
(22858, 22859, {})
(22859, 22860, {})
(22860, 22861, {})
(22861, 22862, {})
(22862, 22863, {})
(22863, 22864, {})
(22864, 22865, {})
(22865, 22866, {})
(22866, 22867, {})
(22867, 22868, {})
(22868, 22869, {})
(22869, 22870, {})
(22870, 22871, {})
(22871, 22872, {})
(22872, 22873, {})
(22873, 22874, {})
(22874, 22875, {})
(22875, 22876, {})
(22876, 22877, {})
(22877, 22878, {})
(22878, 22879, {})
(22879, 22880, {})
(22880, 22881, {})
(22881, 22882, {})
(22882, 22883, {})
(22883, 22884, {})
(22884, 22885, {})
(22885, 22886, {})
(22886, 22887, {})
(22887, 22888, {})
(22888, 22889, {})
(22889, 22890, {})
(22890, 22891, {})
(22891, 22892, {})
(22892, 22893, {})
(22893, 22894, {})
(22894, 22895, {})
(22895, 22896, {})
(22896, 22897, {})
(22897, 22898, {})
(22898, 22899, {})
(22899, 22900, {})
(22900, 22901, {})
(22901, 22902, {})
(22902, 22903, {})
(22903, 22904, {})
(22904, 22905, {})
(22905, 22906, {})
(22906, 22907, {})
(22907, 22908, {})
(22908, 22909, {})
(22909, 22910, {})
(22910, 22911, {})
(22911, 22912, {})
(22912, 22913, {})
(22913, 22914, {})
(22914, 22915, {})
(22915, 22916, {})
(22916, 22917, {})
(22917, 22918, {})
(22918, 22919, {})
(22919, 22920, {})
(22920, 22921, {})
(22921, 22922, {})
(22922, 22923, {})
(22923, 22924, {})
(22924, 22925, {})
(22925, 22926, {})
(22926, 22927, {})
(22927, 22928, {})
(22928, 22929, {})
(22929, 22930, {})
(22930, 22931, {})
(22931, 22932, {})
(22932, 22933, {})
(22933, 22934, {})
(22934, 22935, {})
(22935, 22936, {})
(22936, 22937, {})
(22937, 22938, {})
(22938, 22939, {})
(22939, 22940, {})
(22940, 22941, {})
(22941, 22942, {})
(22942, 22943, {})
(22943, 22944, {})
(22944, 22945, {})
(22945, 22946, {})
(22946, 22947, {})
(22947, 22948, {})
(22948, 22949, {})
(22949, 22950, {})
(22950, 22951, {})
(22951, 22952, {})
(22952, 22953, {})
(22953, 22954, {})
(22954, 22955, {})
(22955, 22956, {})
(22956, 22957, {})
(22957, 22958, {})
(22958, 22959, {})
(22959, 22960, {})
(22960, 22961, {})
(22961, 22962, {})
(22962, 22963, {})
(22963, 22964, {})
(22964, 22965, {})
(22965, 22966, {})
(22966, 22967, {})
(22967, 22968, {})
(22968, 22969, {})
(22969, 22970, {})
(22970, 22971, {})
(22971, 22972, {})
(22972, 22973, {})
(22973, 22974, {})
(22974, 22975, {})
(22975, 22976, {})
(22976, 22977, {})
(22977, 22978, {})
(22978, 22979, {})
(22979, 22980, {})
(22980, 22981, {})
(22981, 22982, {})
(22982, 22983, {})
(22983, 22984, {})
(22984, 22985, {})
(22985, 22986, {})
(22986, 22987, {})
(22987, 22988, {})
(22988, 22989, {})
(22989, 22990, {})
(22990, 22991, {})
(22991, 22992, {})
(22992, 22993, {})
(22993, 22994, {})
(22994, 22995, {})
(22995, 22996, {})
(22996, 22997, {})
(22997, 22998, {})
(22998, 22999, {})
(22999, 23000, {})
(23000, 23001, {})
(23001, 23002, {})
(23002, 23003, {})
(23003, 23004, {})
(23004, 23005, {})
(23005, 23006, {})
(23006, 23007, {})
(23007, 23008, {})
(23008, 23009, {})
(23009, 23010, {})
(23010, 23011, {})
(23011, 23012, {})
(23012, 23013, {})
(23013, 23014, {})
(23014, 23015, {})
(23015, 23016, {})
(23016, 23017, {})
(23017, 23018, {})
(23018, 23019, {})
(23019, 23020, {})
(23020, 23021, {})
(23021, 23022, {})
(23022, 23023, {})
(23023, 23024, {})
(23024, 23025, {})
(23025, 23026, {})
(23026, 23027, {})
(23027, 23028, {})
(23028, 23029, {})
(23029, 23030, {})
(23030, 23031, {})
(23031, 23032, {})
(23032, 23033, {})
(23033, 23034, {})
(23034, 23035, {})
(23035, 23036, {})
(23036, 23037, {})
(23037, 23038, {})
(23038, 23039, {})
(23039, 23040, {})
(23040, 23041, {})
(23041, 23042, {})
(23042, 23043, {})
(23043, 23044, {})
(23044, 23045, {})
(23045, 23046, {})
(23046, 23047, {})
(23047, 23048, {})
(23048, 23049, {})
(23049, 23050, {})
(23050, 23051, {})
(23051, 23052, {})
(23052, 23053, {})
(23053, 23054, {})
(23054, 23055, {})
(23055, 23056, {})
(23056, 23057, {})
(23057, 23058, {})
(23058, 23059, {})
(23059, 23060, {})
(23060, 23061, {})
(23061, 23062, {})
(23062, 23063, {})
(23063, 23064, {})
(23064, 23065, {})
(23065, 23066, {})
(23066, 23067, {})
(23067, 23068, {})
(23068, 23069, {})
(23069, 23070, {})
(23070, 23071, {})
(23071, 23072, {})
(23072, 23073, {})
(23073, 23074, {})
(23074, 23075, {})
(23075, 23076, {})
(23076, 23077, {})
(23077, 23078, {})
(23078, 23079, {})
(23079, 23080, {})
(23080, 23081, {})
(23081, 23082, {})
(23082, 23083, {})
(23083, 23084, {})
(23084, 23085, {})
(23085, 23086, {})
(23086, 23087, {})
(23087, 23088, {})
(23088, 23089, {})
(23089, 23090, {})
(23090, 23091, {})
(23091, 23092, {})
(23092, 23093, {})
(23093, 23094, {})
(23094, 23095, {})
(23095, 23096, {})
(23096, 23097, {})
(23097, 23098, {})
(23098, 23099, {})
(23099, 23100, {})
(23100, 23101, {})
(23101, 23102, {})
(23102, 23103, {})
(23103, 23104, {})
(23104, 23105, {})
(23105, 23106, {})
(23106, 23107, {})
(23107, 23108, {})
(23108, 23109, {})
(23109, 23110, {})
(23110, 23111, {})
(23111, 23112, {})
(23112, 23113, {})
(23113, 23114, {})
(23114, 23115, {})
(23115, 23116, {})
(23116, 23117, {})
(23117, 23118, {})
(23118, 23119, {})
(23119, 23120, {})
(23120, 23121, {})
(23121, 23122, {})
(23122, 23123, {})
(23123, 23124, {})
(23124, 23125, {})
(23125, 23126, {})
(23126, 23127, {})
(23127, 23128, {})
(23128, 23129, {})
(23129, 23130, {})
(23130, 23131, {})
(23131, 23132, {})
(23132, 23133, {})
(23133, 23134, {})
(23134, 23135, {})
(23135, 23136, {})
(23136, 23137, {})
(23137, 23138, {})
(23138, 23139, {})
(23139, 23140, {})
(23140, 23141, {})
(23141, 23142, {})
(23142, 23143, {})
(23143, 23144, {})
(23144, 23145, {})
(23145, 23146, {})
(23146, 23147, {})
(23147, 23148, {})
(23148, 23149, {})
(23149, 23150, {})
(23150, 23151, {})
(23151, 23152, {})
(23152, 23153, {})
(23153, 23154, {})
(23154, 23155, {})
(23155, 23156, {})
(23156, 23157, {})
(23157, 23158, {})
(23158, 23159, {})
(23159, 23160, {})
(23160, 23161, {})
(23161, 23162, {})
(23162, 23163, {})
(23163, 23164, {})
(23164, 23165, {})
(23165, 23166, {})
(23166, 23167, {})
(23167, 23168, {})
(23168, 23169, {})
(23169, 23170, {})
(23170, 23171, {})
(23171, 23172, {})
(23172, 23173, {})
(23173, 23174, {})
(23174, 23175, {})
(23175, 23176, {})
(23176, 23177, {})
(23177, 23178, {})
(23178, 23179, {})
(23179, 23180, {})
(23180, 23181, {})
(23181, 23182, {})
(23182, 23183, {})
(23183, 23184, {})
(23184, 23185, {})
(23185, 23186, {})
(23186, 23187, {})
(23187, 23188, {})
(23188, 23189, {})
(23189, 23190, {})
(23190, 23191, {})
(23191, 23192, {})
(23192, 23193, {})
(23193, 23194, {})
(23194, 23195, {})
(23195, 23196, {})
(23196, 23197, {})
(23197, 23198, {})
(23198, 23199, {})
(23199, 23200, {})
(23200, 23201, {})
(23201, 23202, {})
(23202, 23203, {})
(23203, 23204, {})
(23204, 23205, {})
(23205, 23206, {})
(23206, 23207, {})
(23207, 23208, {})
(23208, 23209, {})
(23209, 23210, {})
(23210, 23211, {})
(23211, 23212, {})
(23212, 23213, {})
(23213, 23214, {})
(23214, 23215, {})
(23215, 23216, {})
(23216, 23217, {})
(23217, 23218, {})
(23218, 23219, {})
(23219, 23220, {})
(23220, 23221, {})
(23221, 23222, {})
(23222, 23223, {})
(23223, 23224, {})
(23224, 23225, {})
(23225, 23226, {})
(23226, 23227, {})
(23227, 23228, {})
(23228, 23229, {})
(23229, 23230, {})
(23230, 23231, {})
(23231, 23232, {})
(23232, 23233, {})
(23233, 23234, {})
(23234, 23235, {})
(23235, 23236, {})
(23236, 23237, {})
(23237, 23238, {})
(23238, 23239, {})
(23239, 23240, {})
(23240, 23241, {})
(23241, 23242, {})
(23242, 23243, {})
(23243, 23244, {})
(23244, 23245, {})
(23245, 23246, {})
(23246, 23247, {})
(23247, 23248, {})
(23248, 23249, {})
(23249, 23250, {})
(23250, 23251, {})
(23251, 23252, {})
(23252, 23253, {})
(23253, 23254, {})
(23254, 23255, {})
(23255, 23256, {})
(23256, 23257, {})
(23257, 23258, {})
(23258, 23259, {})
(23259, 23260, {})
(23260, 23261, {})
(23261, 23262, {})
(23262, 23263, {})
(23263, 23264, {})
(23264, 23265, {})
(23265, 23266, {})
(23266, 23267, {})
(23267, 23268, {})
(23268, 23269, {})
(23269, 23270, {})
(23270, 23271, {})
(23271, 23272, {})
(23272, 23273, {})
(23273, 23274, {})
(23274, 23275, {})
(23275, 23276, {})
(23276, 23277, {})
(23277, 23278, {})
(23278, 23279, {})
(23279, 23280, {})
(23280, 23281, {})
(23281, 23282, {})
(23282, 23283, {})
(23283, 23284, {})
(23284, 23285, {})
(23285, 23286, {})
(23286, 23287, {})
(23287, 23288, {})
(23288, 23289, {})
(23289, 23290, {})
(23290, 23291, {})
(23291, 23292, {})
(23292, 23293, {})
(23293, 23294, {})
(23294, 23295, {})
(23295, 23296, {})
(23296, 23297, {})
(23297, 23298, {})
(23298, 23299, {})
(23299, 23300, {})
(23300, 23301, {})
(23301, 23302, {})
(23302, 23303, {})
(23303, 23304, {})
(23304, 23305, {})
(23305, 23306, {})
(23306, 23307, {})
(23307, 23308, {})
(23308, 23309, {})
(23309, 23310, {})
(23310, 23311, {})
(23311, 23312, {})
(23312, 23313, {})
(23313, 23314, {})
(23314, 23315, {})
(23315, 23316, {})
(23316, 23317, {})
(23317, 23318, {})
(23318, 23319, {})
(23319, 23320, {})
(23320, 23321, {})
(23321, 23322, {})
(23322, 23323, {})
(23323, 23324, {})
(23324, 23325, {})
(23325, 23326, {})
(23326, 23327, {})
(23327, 23328, {})
(23328, 23329, {})
(23329, 23330, {})
(23330, 23331, {})
(23331, 23332, {})
(23332, 23333, {})
(23333, 23334, {})
(23334, 23335, {})
(23335, 23336, {})
(23336, 23337, {})
(23337, 23338, {})
(23338, 23339, {})
(23339, 23340, {})
(23340, 23341, {})
(23341, 23342, {})
(23342, 23343, {})
(23343, 23344, {})
(23344, 23345, {})
(23345, 23346, {})
(23346, 23347, {})
(23347, 23348, {})
(23348, 23349, {})
(23349, 23350, {})
(23350, 23351, {})
(23351, 23352, {})
(23352, 23353, {})
(23353, 23354, {})
(23354, 23355, {})
(23355, 23356, {})
(23356, 23357, {})
(23357, 23358, {})
(23358, 23359, {})
(23359, 23360, {})
(23360, 23361, {})
(23361, 23362, {})
(23362, 23363, {})
(23363, 23364, {})
(23364, 23365, {})
(23365, 23366, {})
(23366, 23367, {})
(23367, 23368, {})
(23368, 23369, {})
(23369, 23370, {})
(23370, 23371, {})
(23371, 23372, {})
(23372, 23373, {})
(23373, 23374, {})
(23374, 23375, {})
(23375, 23376, {})
(23376, 23377, {})
(23377, 23378, {})
(23378, 23379, {})
(23379, 23380, {})
(23380, 23381, {})
(23381, 23382, {})
(23382, 23383, {})
(23383, 23384, {})
(23384, 23385, {})
(23385, 23386, {})
(23386, 23387, {})
(23387, 23388, {})
(23388, 23389, {})
(23389, 23390, {})
(23390, 23391, {})
(23391, 23392, {})
(23392, 23393, {})
(23393, 23394, {})
(23394, 23395, {})
(23395, 23396, {})
(23396, 23397, {})
(23397, 23398, {})
(23398, 23399, {})
(23399, 23400, {})
(23400, 23401, {})
(23401, 23402, {})
(23402, 23403, {})
(23403, 23404, {})
(23404, 23405, {})
(23405, 23406, {})
(23406, 23407, {})
(23407, 23408, {})
(23408, 23409, {})
(23409, 23410, {})
(23410, 23411, {})
(23411, 23412, {})
(23412, 23413, {})
(23413, 23414, {})
(23414, 23415, {})
(23415, 23416, {})
(23416, 23417, {})
(23417, 23418, {})
(23418, 23419, {})
(23419, 23420, {})
(23420, 23421, {})
(23421, 23422, {})
(23422, 23423, {})
(23423, 23424, {})
(23424, 23425, {})
(23425, 23426, {})
(23426, 23427, {})
(23427, 23428, {})
(23428, 23429, {})
(23429, 23430, {})
(23430, 23431, {})
(23431, 23432, {})
(23432, 23433, {})
(23433, 23434, {})
(23434, 23435, {})
(23435, 23436, {})
(23436, 23437, {})
(23437, 23438, {})
(23438, 23439, {})
(23439, 23440, {})
(23440, 23441, {})
(23441, 23442, {})
(23442, 23443, {})
(23443, 23444, {})
(23444, 23445, {})
(23445, 23446, {})
(23446, 23447, {})
(23447, 23448, {})
(23448, 23449, {})
(23449, 23450, {})
(23450, 23451, {})
(23451, 23452, {})
(23452, 23453, {})
(23453, 23454, {})
(23454, 23455, {})
(23455, 23456, {})
(23456, 23457, {})
(23457, 23458, {})
(23458, 23459, {})
(23459, 23460, {})
(23460, 23461, {})
(23461, 23462, {})
(23462, 23463, {})
(23463, 23464, {})
(23464, 23465, {})
(23465, 23466, {})
(23466, 23467, {})
(23467, 23468, {})
(23468, 23469, {})
(23469, 23470, {})
(23470, 23471, {})
(23471, 23472, {})
(23472, 23473, {})
(23473, 23474, {})
(23474, 23475, {})
(23475, 23476, {})
(23476, 23477, {})
(23477, 23478, {})
(23478, 23479, {})
(23479, 23480, {})
04thAug2024 Implemtneation
In [ ]:
!pip install numpy pandas scikit-learn tensorflow networkx nltk
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (1.25.2) Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.0.3) Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.2.2) Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.15.0) Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (3.3) Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (3.8.1) Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4) Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.11.4) Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2) Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0) Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0) Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3) Requirement already satisfied: flatbuffers>=23.5.26 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25) Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.6.0) Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.9.0) Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1) Requirement already satisfied: ml-dtypes~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0) Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.1) Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3) Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (71.0.4) Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0) Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0) Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.12.2) Requirement already satisfied: wrapt<1.15,>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.14.1) Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.37.1) Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.64.1) Requirement already satisfied: tensorboard<2.16,>=2.15 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.2) Requirement already satisfied: tensorflow-estimator<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: keras<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk) (8.1.7) Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk) (2024.5.15) Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk) (4.66.4) Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0) Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.27.0) Requirement already satisfied: google-auth-oauthlib<2,>=0.5 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (1.2.1) Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.6) Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.31.0) Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (0.7.2) Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.0.3) Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (5.4.0) Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.4.0) Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (4.9) Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (1.3.1) Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.7) Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2.0.7) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2024.7.4) Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.16,>=2.15->tensorflow) (2.1.5) Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.6.0) Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (3.2.2)
In [ ]:
import numpy as np
import pandas as pd
import networkx as nx
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Dropout
from tensorflow.keras.models import Model
import re
import nltk
from nltk.corpus import stopwords
# Download NLTK stopwords
nltk.download('stopwords')
STOPWORDS = set(stopwords.words('english'))
[nltk_data] Downloading package stopwords to /root/nltk_data... [nltk_data] Unzipping corpora/stopwords.zip.
In [ ]:
# Load the dataset from the uploaded file
df = pd.read_csv('/content/fake.csv')
df.head()
Out[ ]:
| title | text | subject | date | |
|---|---|---|---|---|
| 0 | Donald Trump Sends Out Embarrassing New Year’... | Donald Trump just couldn t wish all Americans ... | News | December 31, 2017 |
| 1 | Drunk Bragging Trump Staffer Started Russian ... | House Intelligence Committee Chairman Devin Nu... | News | December 31, 2017 |
| 2 | Sheriff David Clarke Becomes An Internet Joke... | On Friday, it was revealed that former Milwauk... | News | December 30, 2017 |
| 3 | Trump Is So Obsessed He Even Has Obama’s Name... | On Christmas day, Donald Trump announced that ... | News | December 29, 2017 |
| 4 | Pope Francis Just Called Out Donald Trump Dur... | Pope Francis used his annual Christmas Day mes... | News | December 25, 2017 |
In [ ]:
# Display the first few rows of the dataset
print(df.head())
# Display information about the dataset
print(df.info())
# Display summary statistics of the dataset
print(df.describe())
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
In [ ]:
# Step 1: Setting Up the Environment
!pip install numpy pandas scikit-learn tensorflow networkx nltk
import numpy as np
import pandas as pd
import networkx as nx
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Dropout
from tensorflow.keras.models import Model
import re
import nltk
from nltk.corpus import stopwords
# Download NLTK stopwords
nltk.download('stopwords')
STOPWORDS = set(stopwords.words('english'))
# Step 2: Loading the Dataset
from google.colab import files
uploaded = files.upload()
# Load the dataset from the uploaded file
df = pd.read_csv('/content/fake.csv')
# Step 3: Inspecting the Dataset
# Display the first few rows of the dataset
print(df.head())
# Display information about the dataset
print(df.info())
# Display summary statistics of the dataset
print(df.describe())
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (1.25.2) Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.0.3) Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.2.2) Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.15.0) Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (3.3) Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (3.8.1) Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4) Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.11.4) Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2) Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0) Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0) Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3) Requirement already satisfied: flatbuffers>=23.5.26 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25) Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.6.0) Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.9.0) Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1) Requirement already satisfied: ml-dtypes~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0) Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.1) Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3) Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (71.0.4) Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0) Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0) Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.12.2) Requirement already satisfied: wrapt<1.15,>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.14.1) Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.37.1) Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.64.1) Requirement already satisfied: tensorboard<2.16,>=2.15 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.2) Requirement already satisfied: tensorflow-estimator<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: keras<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk) (8.1.7) Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk) (2024.5.15) Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk) (4.66.4) Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0) Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.27.0) Requirement already satisfied: google-auth-oauthlib<2,>=0.5 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (1.2.1) Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.6) Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.31.0) Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (0.7.2) Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.0.3) Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (5.4.0) Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.4.0) Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (4.9) Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (1.3.1) Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.7) Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2.0.7) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2024.7.4) Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.16,>=2.15->tensorflow) (2.1.5) Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.6.0) Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (3.2.2)
[nltk_data] Downloading package stopwords to /root/nltk_data... [nltk_data] Package stopwords is already up-to-date!
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
In [ ]:
# Step 1: Setting Up the Environment
!pip install numpy pandas scikit-learn tensorflow networkx nltk
import numpy as np
import pandas as pd
import networkx as nx
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Dropout
from tensorflow.keras.models import Model
import re
import nltk
from nltk.corpus import stopwords
# Download NLTK stopwords
nltk.download('stopwords')
STOPWORDS = set(stopwords.words('english'))
# Step 2: Loading the Dataset
from google.colab import files
uploaded = files.upload()
# Load the dataset from the uploaded file
df = pd.read_csv('/content/fake.csv')
# Step 3: Inspecting the Dataset
# Display the first few rows of the dataset
print(df.head())
# Display information about the dataset
print(df.info())
# Display summary statistics of the dataset
print(df.describe())
# Step 4: Data Preprocessing
# Handle Missing Values
# Drop rows with missing values
df.dropna(inplace=True)
# Text Cleaning Function
def clean_text(text):
# Remove special characters and numbers
text = re.sub(r'[^a-zA-Z\s]', '', text)
# Convert to lowercase
text = text.lower()
# Remove stopwords
text = ' '.join(word for word in text.split() if word not in STOPWORDS)
return text
# Apply text cleaning to 'title' and 'text' columns
df['cleaned_title'] = df['title'].apply(clean_text)
df['cleaned_text'] = df['text'].apply(clean_text)
# Combine 'title' and 'text' columns
df['combined_text'] = df['cleaned_title'] + ' ' + df['cleaned_text']
# Display the first few rows of the cleaned data
print(df[['title', 'cleaned_title', 'text', 'cleaned_text', 'combined_text']].head())
# Optional: Label Encode the 'subject' column if used as a feature
from sklearn.preprocessing import LabelEncoder
label_encoder = LabelEncoder()
df['subject_encoded'] = label_encoder.fit_transform(df['subject'])
# Display the first few rows of the dataset with encoded subject
print(df[['subject', 'subject_encoded']].head())
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (1.25.2) Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.0.3) Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.2.2) Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.15.0) Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (3.3) Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (3.8.1) Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4) Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.11.4) Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2) Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0) Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0) Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3) Requirement already satisfied: flatbuffers>=23.5.26 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25) Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.6.0) Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.9.0) Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1) Requirement already satisfied: ml-dtypes~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0) Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.1) Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3) Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (71.0.4) Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0) Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0) Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.12.2) Requirement already satisfied: wrapt<1.15,>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.14.1) Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.37.1) Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.64.1) Requirement already satisfied: tensorboard<2.16,>=2.15 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.2) Requirement already satisfied: tensorflow-estimator<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: keras<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk) (8.1.7) Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk) (2024.5.15) Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk) (4.66.4) Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0) Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.27.0) Requirement already satisfied: google-auth-oauthlib<2,>=0.5 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (1.2.1) Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.6) Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.31.0) Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (0.7.2) Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.0.3) Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (5.4.0) Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.4.0) Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (4.9) Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (1.3.1) Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.7) Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2.0.7) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2024.7.4) Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.16,>=2.15->tensorflow) (2.1.5) Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.6.0) Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (3.2.2)
[nltk_data] Downloading package stopwords to /root/nltk_data... [nltk_data] Package stopwords is already up-to-date!
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
cleaned_title \
0 donald trump sends embarrassing new years eve ...
1 drunk bragging trump staffer started russian c...
2 sheriff david clarke becomes internet joke thr...
3 trump obsessed even obamas name coded website ...
4 pope francis called donald trump christmas speech
text \
0 Donald Trump just couldn t wish all Americans ...
1 House Intelligence Committee Chairman Devin Nu...
2 On Friday, it was revealed that former Milwauk...
3 On Christmas day, Donald Trump announced that ...
4 Pope Francis used his annual Christmas Day mes...
cleaned_text \
0 donald trump wish americans happy new year lea...
1 house intelligence committee chairman devin nu...
2 friday revealed former milwaukee sheriff david...
3 christmas day donald trump announced would bac...
4 pope francis used annual christmas day message...
combined_text
0 donald trump sends embarrassing new years eve ...
1 drunk bragging trump staffer started russian c...
2 sheriff david clarke becomes internet joke thr...
3 trump obsessed even obamas name coded website ...
4 pope francis called donald trump christmas spe...
subject subject_encoded
0 News 2
1 News 2
2 News 2
3 News 2
4 News 2
In [ ]:
# Step 1: Setting Up the Environment
!pip install numpy pandas scikit-learn tensorflow networkx nltk
import numpy as np
import pandas as pd
import networkx as nx
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Dropout
from tensorflow.keras.models import Model
import re
import nltk
from nltk.corpus import stopwords
# Download NLTK stopwords
nltk.download('stopwords')
STOPWORDS = set(stopwords.words('english'))
# Step 2: Loading the Dataset
from google.colab import files
uploaded = files.upload()
# Load the dataset from the uploaded file
df = pd.read_csv('/content/fake.csv')
# Step 3: Inspecting the Dataset
# Display the first few rows of the dataset
print(df.head())
# Display information about the dataset
print(df.info())
# Display summary statistics of the dataset
print(df.describe())
# Step 4: Data Preprocessing
# Handle Missing Values
# Drop rows wi
Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (1.25.2) Requirement already satisfied: pandas in /usr/local/lib/python3.10/dist-packages (2.0.3) Requirement already satisfied: scikit-learn in /usr/local/lib/python3.10/dist-packages (1.2.2) Requirement already satisfied: tensorflow in /usr/local/lib/python3.10/dist-packages (2.15.0) Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (3.3) Requirement already satisfied: nltk in /usr/local/lib/python3.10/dist-packages (3.8.1) Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.10/dist-packages (from pandas) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2023.4) Requirement already satisfied: tzdata>=2022.1 in /usr/local/lib/python3.10/dist-packages (from pandas) (2024.1) Requirement already satisfied: scipy>=1.3.2 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.11.4) Requirement already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (1.4.2) Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn) (3.5.0) Requirement already satisfied: absl-py>=1.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.4.0) Requirement already satisfied: astunparse>=1.6.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.6.3) Requirement already satisfied: flatbuffers>=23.5.26 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.3.25) Requirement already satisfied: gast!=0.5.0,!=0.5.1,!=0.5.2,>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.6.0) Requirement already satisfied: google-pasta>=0.1.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: h5py>=2.9.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.9.0) Requirement already satisfied: libclang>=13.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (18.1.1) Requirement already satisfied: ml-dtypes~=0.2.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.2.0) Requirement already satisfied: opt-einsum>=2.3.2 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.3.0) Requirement already satisfied: packaging in /usr/local/lib/python3.10/dist-packages (from tensorflow) (24.1) Requirement already satisfied: protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (3.20.3) Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from tensorflow) (71.0.4) Requirement already satisfied: six>=1.12.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.16.0) Requirement already satisfied: termcolor>=1.1.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.4.0) Requirement already satisfied: typing-extensions>=3.6.6 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (4.12.2) Requirement already satisfied: wrapt<1.15,>=1.11.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.14.1) Requirement already satisfied: tensorflow-io-gcs-filesystem>=0.23.1 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (0.37.1) Requirement already satisfied: grpcio<2.0,>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (1.64.1) Requirement already satisfied: tensorboard<2.16,>=2.15 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.2) Requirement already satisfied: tensorflow-estimator<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: keras<2.16,>=2.15.0 in /usr/local/lib/python3.10/dist-packages (from tensorflow) (2.15.0) Requirement already satisfied: click in /usr/local/lib/python3.10/dist-packages (from nltk) (8.1.7) Requirement already satisfied: regex>=2021.8.3 in /usr/local/lib/python3.10/dist-packages (from nltk) (2024.5.15) Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from nltk) (4.66.4) Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.10/dist-packages (from astunparse>=1.6.0->tensorflow) (0.43.0) Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.27.0) Requirement already satisfied: google-auth-oauthlib<2,>=0.5 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (1.2.1) Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.6) Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (2.31.0) Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (0.7.2) Requirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard<2.16,>=2.15->tensorflow) (3.0.3) Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (5.4.0) Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.4.0) Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (4.9) Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (1.3.1) Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (3.7) Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2.0.7) Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2.21.0->tensorboard<2.16,>=2.15->tensorflow) (2024.7.4) Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard<2.16,>=2.15->tensorflow) (2.1.5) Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard<2.16,>=2.15->tensorflow) (0.6.0) Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<2,>=0.5->tensorboard<2.16,>=2.15->tensorflow) (3.2.2)
[nltk_data] Downloading package stopwords to /root/nltk_data... [nltk_data] Package stopwords is already up-to-date!
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
In [ ]:
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
import networkx as nx
import numpy as np
In [ ]:
X = df['text'] # Using 'text' column for content
y = df['subject'] # Using 'subject' column as the target variable
# Encoding the target variable
label_encoder = LabelEncoder()
y_encoded = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.2, random_state=42)
In [ ]:
vectorizer = TfidfVectorizer(max_features=5000)
X_train_tfidf = vectorizer.fit_transform(X_train).toarray()
X_test_tfidf = vectorizer.transform(X_test).toarray()
# Display the shape of the TF-IDF matrices
print(f'TF-IDF Train shape: {X_train_tfidf.shape}')
print(f'TF-IDF Test shape: {X_test_tfidf.shape}')
TF-IDF Train shape: (18784, 5000) TF-IDF Test shape: (4697, 5000)
In [ ]:
import networkx as nx
print(nx.__version__)
3.3
In [ ]:
import pandas as pd
import networkx as nx
# Sample DataFrame (assuming df is defined)
df = pd.DataFrame({
'text': ['Tweet1', 'Tweet2', 'Tweet3', 'Tweet4'],
'subject': ['Subject1', 'Subject2', 'Subject3', 'Subject4']
})
# Create a new graph
G = nx.Graph()
# Add nodes (tweets) to the graph
for i, row in df.iterrows():
G.add_node(i, text=row['text'], subject=row['subject'])
# Assuming interaction data exists in the form of (user1, user2, interaction)
# For demonstration, let's assume we have interaction pairs (we'll simulate this part)
interaction_data = [(i, i+1) for i in range(len(df)-1)] # Simulated interaction pairs
# Add edges (interactions) to the graph
G.add_edges_from(interaction_data)
# Display graph information using NetworkX 3.3 methods
# Number of nodes and edges
print(f"Number of nodes: {G.number_of_nodes()}")
print(f"Number of edges: {G.number_of_edges()}")
# Node and edge data
print("\nNodes:")
for node, data in G.nodes(data=True):
print(node, data)
print("\nEdges:")
for edge in G.edges(data=True):
print(edge)
# Using graph properties to describe the graph
print("\nGraph Properties:")
print(f"Is the graph directed? {G.is_directed()}")
print(f"Graph density: {nx.density(G)}")
print(f"Average clustering coefficient: {nx.average_clustering(G)}")
Number of nodes: 4
Number of edges: 3
Nodes:
0 {'text': 'Tweet1', 'subject': 'Subject1'}
1 {'text': 'Tweet2', 'subject': 'Subject2'}
2 {'text': 'Tweet3', 'subject': 'Subject3'}
3 {'text': 'Tweet4', 'subject': 'Subject4'}
Edges:
(0, 1, {})
(1, 2, {})
(2, 3, {})
Graph Properties:
Is the graph directed? False
Graph density: 0.5
Average clustering coefficient: 0.0
In [ ]:
import matplotlib.pyplot as plt
# Draw the graph
pos = nx.spring_layout(G) # Define the layout
nx.draw(G, pos, with_labels=True, node_color='lightblue', edge_color='gray', node_size=500, font_size=10, font_weight='bold')
# Draw node labels
labels = nx.get_node_attributes(G, 'text')
nx.draw_networkx_labels(G, pos, labels, font_size=10, font_weight='bold')
plt.title('Graph Visualization')
plt.show()
In [ ]:
# Calculate centrality measures
degree_centrality = nx.degree_centrality(G)
betweenness_centrality = nx.betweenness_centrality(G)
closeness_centrality = nx.closeness_centrality(G)
# Print centrality measures
print("Degree Centrality:", degree_centrality)
print("Betweenness Centrality:", betweenness_centrality)
print("Closeness Centrality:", closeness_centrality)
# Calculate shortest path lengths (if the graph is connected)
if nx.is_connected(G):
shortest_paths = dict(nx.all_pairs_shortest_path_length(G))
print("Shortest Paths:", shortest_paths)
Degree Centrality: {0: 0.3333333333333333, 1: 0.6666666666666666, 2: 0.6666666666666666, 3: 0.3333333333333333}
Betweenness Centrality: {0: 0.0, 1: 0.6666666666666666, 2: 0.6666666666666666, 3: 0.0}
Closeness Centrality: {0: 0.5, 1: 0.75, 2: 0.75, 3: 0.5}
Shortest Paths: {0: {0: 0, 1: 1, 2: 2, 3: 3}, 1: {1: 0, 0: 1, 2: 1, 3: 2}, 2: {2: 0, 1: 1, 3: 1, 0: 2}, 3: {3: 0, 2: 1, 1: 2, 0: 3}}
In [ ]:
# Extract node features
node_features = nx.get_node_attributes(G, 'text')
node_features = pd.DataFrame.from_dict(node_features, orient='index', columns=['text'])
# Example of feature transformation (e.g., convert text to numerical features)
from sklearn.feature_extraction.text import TfidfVectorizer
vectorizer = TfidfVectorizer()
X_text = vectorizer.fit_transform(node_features['text']).toarray()
# Combine with any other features if available
# Assuming other_features is a DataFrame with additional features
# combined_features = np.hstack([X_text, other_features.values])
In [ ]:
import pandas as pd
import matplotlib.pyplot as plt
import networkx as nx
from networkx.algorithms.community import greedy_modularity_communities
# Load your dataset
df = pd.read_csv('/content/fake.csv')
# Display the columns to understand what data is available
print("Columns in DataFrame:", df.columns)
# Create a graph based on the available data
G = nx.Graph()
# Add nodes with unique identifiers and some attributes from the dataset
for idx, row in df.iterrows():
G.add_node(idx, title=row['title'], subject=row['subject'])
# Simulate connections (edges) between consecutive rows
edges = [(i, i+1) for i in range(len(df)-1)]
G.add_edges_from(edges)
# Step 2: Detect Communities
communities = list(greedy_modularity_communities(G))
# Create a community map to color nodes
community_map = {node: idx for idx, comm in enumerate(communities) for node in comm}
colors = [community_map[node] for node in G.nodes()]
# Step 3: Visualize the Communities
color_map = plt.cm.get_cmap('Set3', len(communities))
plt.figure(figsize=(12, 10))
pos = nx.spring_layout(G) # Layout for the graph
nx.draw(G, pos, with_labels=True, node_color=colors, cmap=color_map, node_size=700, font_size=15, font_weight='bold', edge_color='gray')
# Add a color bar to indicate the communities
sm = plt.cm.ScalarMappable(cmap=color_map, norm=plt.Normalize(vmin=0, vmax=len(communities) - 1))
sm.set_array([])
plt.colorbar(sm, ticks=range(len(communities)), label='Community')
plt.title('Community Detection Visualization')
plt.show()
Columns in DataFrame: Index(['title', 'text', 'subject', 'date'], dtype='object')
<ipython-input-32-59d841ec2bd2>:31: MatplotlibDeprecationWarning: The get_cmap function was deprecated in Matplotlib 3.7 and will be removed two minor releases later. Use ``matplotlib.colormaps[name]`` or ``matplotlib.colormaps.get_cmap(obj)`` instead.
color_map = plt.cm.get_cmap('Set3', len(communities))
<ipython-input-32-59d841ec2bd2>:40: MatplotlibDeprecationWarning: Unable to determine Axes to steal space for Colorbar. Using gca(), but will raise in the future. Either provide the *cax* argument to use as the Axes for the Colorbar, provide the *ax* argument to steal space from it, or add *mappable* to an Axes.
plt.colorbar(sm, ticks=range(len(communities)), label='Community')
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
In [ ]:
import networkx as nx
import pandas as pd
from networkx.algorithms.community import modularity
# Calculate and print modularity score
modularity_score = modularity(G, communities)
print(f"Modularity Score: {modularity_score}")
# Extract and analyze community attributes
community_analysis = []
for comm_id, comm in enumerate(communities):
comm_nodes = list(comm)
# Extract attributes for the current community
node_attributes = [G.nodes[node] for node in comm_nodes]
subjects = [attr['subject'] for attr in node_attributes]
community_analysis.append({
'community_id': comm_id,
'num_nodes': len(comm_nodes),
'most_common_subject': pd.Series(subjects).mode()[0]
})
# Convert to DataFrame for better readability
community_df = pd.DataFrame(community_analysis)
print(community_df)
# Plot a histogram of community sizes
sizes = community_df['num_nodes']
plt.figure(figsize=(10, 6))
plt.hist(sizes, bins=len(community_df), edgecolor='black')
plt.title('Histogram of Community Sizes')
plt.xlabel('Community Size')
plt.ylabel('Frequency')
plt.show()
Modularity Score: 0.9867784817772379
community_id num_nodes most_common_subject
0 0 185 Middle-east
1 1 128 News
2 2 128 News
3 3 128 News
4 4 128 News
.. ... ... ...
178 178 128 Middle-east
179 179 128 Middle-east
180 180 128 Middle-east
181 181 128 Middle-east
182 182 128 Middle-east
[183 rows x 3 columns]
In [ ]:
import pandas as pd
import numpy as np
import re
import networkx as nx
import matplotlib.pyplot as plt
import seaborn as sns
import community as community_louvain # For Louvain method
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from networkx.algorithms.community import asyn_lpa_communities, greedy_modularity_communities
# Load and prepare the dataset
df = pd.read_csv('/content/fake.csv')
# Clean text data
def clean_text(text):
text = re.sub(r'http\S+', '', text)
text = re.sub(r'[^a-zA-Z\s]', '', text)
text = text.lower()
return text
df['cleaned_text'] = df['text'].apply(clean_text)
# Text vectorization
vectorizer = TfidfVectorizer()
X_text = vectorizer.fit_transform(df['cleaned_text'])
cosine_sim = cosine_similarity(X_text)
# Create graph
G = nx.Graph()
for i, row in df.iterrows():
G.add_node(i, text=row['text'], subject=row['subject'])
# Add edges based on cosine similarity
threshold = 0.5
for i in range(len(df)):
for j in range(i+1, len(df)):
if cosine_sim[i, j] > threshold:
G.add_edge(i, j, weight=cosine_sim[i, j])
# Community detection
partition = community_louvain.best_partition(G)
louvain_communities = {}
for node, comm_id in partition.items():
if comm_id not in louvain_communities:
louvain_communities[comm_id] = []
louvain_communities[comm_id].append(node)
lpa_communities = list(asyn_lpa_communities(G))
lpa_communities_dict = {i: list(comm) for i, comm in enumerate(lpa_communities)}
greedy_communities = list(greedy_modularity_communities(G))
greedy_communities_dict = {i: list(comm) for i, comm in enumerate(greedy_communities)}
# Visualizations
# Basic Network Graph
def plot_network(G, title):
pos = nx.spring_layout(G, seed=42)
plt.figure(figsize=(12, 8))
nx.draw(G, pos, with_labels=True, node_color='lightblue', edge_color='gray', node_size=50, font_size=8)
plt.title(title)
plt.show()
# Community Detection Graphs
def plot_communities(G, communities, title):
pos = nx.spring_layout(G, seed=42)
plt.figure(figsize=(12, 8))
for comm_id, comm in communities.items():
nx.draw_networkx_nodes(G, pos, nodelist=comm, node_color=f'C{comm_id}', label=f'Community {comm_id}')
nx.draw_networkx_edges(G, pos, alpha=0.5)
nx.draw_networkx_labels(G, pos, {node: node for node in G.nodes()})
plt.title(title)
plt.legend()
plt.show()
# Plot graphs
plot_network(G, 'Basic Network Graph')
plot_communities(G, louvain_communities, 'Louvain Community Detection')
plot_communities(G, lpa_communities_dict, 'LPA Community Detection')
plot_communities(G, greedy_communities_dict, 'Greedy Modularity Community Detection')
# Cosine Similarity Heatmap
plt.figure(figsize=(10, 8))
sns.heatmap(cosine_sim, cmap='viridis', square=True)
plt.title('Cosine Similarity Heatmap')
plt.show()
--------------------------------------------------------------------------- AttributeError Traceback (most recent call last) <ipython-input-1-4f6b8c389b33> in <cell line: 42>() 40 41 # Community detection ---> 42 partition = community_louvain.best_partition(G) 43 louvain_communities = {} 44 for node, comm_id in partition.items(): AttributeError: module 'community' has no attribute 'best_partition'
05th AUgust 2024 step by step implementation
Step 1:Mount Google Drive and Load the Dataset
In [ ]:
from google.colab import drive
drive.mount('/content/drive')
# Define the path to your dataset
dataset_path = '/content/fake.csv'
# Load the dataset
import pandas as pd
df = pd.read_csv(dataset_path)
print("Dataset loaded successfully!")
Drive already mounted at /content/drive; to attempt to forcibly remount, call drive.mount("/content/drive", force_remount=True).
Dataset loaded successfully!
Step 2: Display Basic Dataset Information
In [ ]:
# Display the first few rows of the dataset
print(df.head())
# Display information about the dataset
print(df.info())
# Display summary statistics of the dataset
print(df.describe())
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
Step 3: Preprocess the Data
In [ ]:
# Clean the text data
import re
def clean_text(text):
text = re.sub(r'http\S+', '', text) # Remove URLs
text = re.sub(r'\W', ' ', text) # Remove special characters
text = re.sub(r'\s+', ' ', text) # Remove extra spaces
text = text.lower().strip() # Convert to lowercase and strip whitespace
return text
df['cleaned_text'] = df['text'].apply(clean_text)
print("Text data cleaned!")
Text data cleaned!
Step 4: Split the Data and Extract Features
In [ ]:
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
# Splitting the Data
X = df['cleaned_text']
y = df['subject']
# Encoding the target variable
label_encoder = LabelEncoder()
y_encoded = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.2, random_state=42)
# Feature Extraction using TF-IDF Vectorization
vectorizer = TfidfVectorizer(max_features=5000)
X_train_tfidf = vectorizer.fit_transform(X_train).toarray()
X_test_tfidf = vectorizer.transform(X_test).toarray()
# Display the shape of the TF-IDF matrices
print(f'TF-IDF Train shape: {X_train_tfidf.shape}')
print(f'TF-IDF Test shape: {X_test_tfidf.shape}')
TF-IDF Train shape: (18784, 5000) TF-IDF Test shape: (4697, 5000)
Step 5: Build and Analyze Graphs
In [ ]:
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
# Splitting the Data
X = df['text'] # Using 'text' column for content
y = df['subject'] # Using 'subject' column as the target variable
# Encoding the target variable
label_encoder = LabelEncoder()
y_encoded = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.2, random_state=42)
# Feature Extraction using TF-IDF Vectorization
vectorizer = TfidfVectorizer(max_features=5000)
X_train_tfidf = vectorizer.fit_transform(X_train).toarray()
X_test_tfidf = vectorizer.transform(X_test).toarray()
# Display the shape of the TF-IDF matrices
print(f'TF-IDF Train shape: {X_train_tfidf.shape}')
print(f'TF-IDF Test shape: {X_test_tfidf.shape}')
# Graph Construction
# Create an empty graph
G = nx.Graph()
# Add nodes (tweets) to the graph
for i, row in df.iterrows():
G.add_node(i, text=row['text'], subject=row['subject'])
# Assuming interaction data exists in the form of (user1, user2, interaction)
# For demonstration, let's assume we have interaction pairs (we'll simulate this part)
interaction_data = [(i, i+1) for i in range(len(df)-1)] # Simulated interaction pairs
# Add edges (interactions) to the graph
G.add_edges_from(interaction_data)
# Manually display the graph information
num_nodes = G.number_of_nodes()
num_edges = G.number_of_edges()
print(f'Graph has {num_nodes} nodes and {num_edges} edges')
# Plot the degree distribution
degree_sequence = sorted([d for n, d in G.degree()], reverse=True) # degree sequence
degree_count = np.bincount(degree_sequence)
degree = np.arange(len(degree_count))
plt.figure(figsize=(10, 6))
plt.bar(degree, degree_count, width=0.80, color='b')
plt.title("Degree Histogram")
plt.ylabel("Count")
plt.xlabel("Degree")
plt.show()
# Plot the graph structure
plt.figure(figsize=(12, 12))
pos = nx.spring_layout(G, k=0.15)
nx.draw(G, pos, with_labels=False, node_size=20, node_color='blue', edge_color='gray')
plt.title('Graph Structure Visualization')
plt.show()
TF-IDF Train shape: (18784, 5000) TF-IDF Test shape: (4697, 5000) Graph has 23481 nodes and 23480 edges
Step 5: Splitting the Data and Feature Extraction(fake news identification with red color)
Step 6: Model Training and Evaluation
In [ ]:
# Step 6: Model Training and Evaluation
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
# 6.1 Training a Classifier Using TF-IDF Features
# Initialize the Logistic Regression model
model = LogisticRegression(max_iter=1000)
# Train the model on the training data
model.fit(X_train_tfidf, y_train)
# 6.2 Model Evaluation
# Predict on the test data
y_pred = model.predict(X_test_tfidf)
# Calculate accuracy
accuracy = accuracy_score(y_test, y_pred)
print(f'Accuracy: {accuracy:.4f}')
# Generate classification report
print("Classification Report:")
print(classification_report(y_test, y_pred))
# Confusion Matrix
conf_matrix = confusion_matrix(y_test, y_pred)
# Plot Confusion Matrix
plt.figure(figsize=(8, 6))
sns.heatmap(conf_matrix, annot=True, fmt='d', cmap='Blues', xticklabels=label_encoder.classes_, yticklabels=label_encoder.classes_)
plt.title('Confusion Matrix')
plt.xlabel('Predicted')
plt.ylabel('True')
plt.show()
# 6.3 Incorporating Graph-Based Features (Optional)
# Example of a simple graph-based feature: degree centrality
degree_centrality = nx.degree_centrality(G)
# Convert degree centrality to a numpy array aligned with the TF-IDF features
X_train_graph = np.array([degree_centrality[i] for i in X_train.index])
X_test_graph = np.array([degree_centrality[i] for i in X_test.index])
# Combine TF-IDF features with graph features
X_train_combined = np.hstack((X_train_tfidf, X_train_graph.reshape(-1, 1)))
X_test_combined = np.hstack((X_test_tfidf, X_test_graph.reshape(-1, 1)))
# Train a new model using the combined features
model_combined = LogisticRegression(max_iter=1000)
model_combined.fit(X_train_combined, y_train)
# Predict on the test data with the combined model
y_pred_combined = model_combined.predict(X_test_combined)
# Calculate accuracy for the combined model
accuracy_combined = accuracy_score(y_test, y_pred_combined)
print(f'Combined Model Accuracy: {accuracy_combined:.4f}')
# Generate classification report for the combined model
print("Combined Model Classification Report:")
print(classification_report(y_test, y_pred_combined))
# Confusion Matrix for the combined model
conf_matrix_combined = confusion_matrix(y_test, y_pred_combined)
# Plot Confusion Matrix for the combined model
plt.figure(figsize=(8, 6))
sns.heatmap(conf_matrix_combined, annot=True, fmt='d', cmap='Blues', xticklabels=label_encoder.classes_, yticklabels=label_encoder.classes_)
plt.title('Combined Model Confusion Matrix')
plt.xlabel('Predicted')
plt.ylabel('True')
plt.show()
Accuracy: 0.6129
Classification Report:
precision recall f1-score support
0 0.24 0.06 0.10 316
1 0.15 0.14 0.15 159
2 0.93 0.98 0.95 1821
3 0.11 0.09 0.10 160
4 0.27 0.16 0.20 897
5 0.48 0.66 0.56 1344
accuracy 0.61 4697
macro avg 0.36 0.35 0.34 4697
weighted avg 0.57 0.61 0.58 4697
Combined Model Accuracy: 0.6129
Combined Model Classification Report:
precision recall f1-score support
0 0.24 0.06 0.10 316
1 0.15 0.14 0.15 159
2 0.93 0.98 0.95 1821
3 0.11 0.09 0.10 160
4 0.27 0.16 0.20 897
5 0.48 0.66 0.56 1344
accuracy 0.61 4697
macro avg 0.36 0.35 0.34 4697
weighted avg 0.57 0.61 0.58 4697
Graph Visualization- Highlighting Fake News Nodes
In [ ]:
# Plot the graph with nodes colored by the subject
plt.figure(figsize=(12, 12))
# Color mapping for subjects
color_map = ['red' if label == 'Fake' else 'blue' for label in df['subject']]
# Draw the graph
pos = nx.spring_layout(G, seed=42) # positions for all nodes
nx.draw_networkx_nodes(G, pos, node_color=color_map, node_size=20, alpha=0.6)
nx.draw_networkx_edges(G, pos, alpha=0.3)
plt.title('Graph Visualization with Fake News Highlighted in Red')
plt.show()
In [ ]:
!jupyter nbconvert --to html your_script.ipynb
[NbConvertApp] WARNING | pattern 'your_script.ipynb' matched no files
This application is used to convert notebook files (*.ipynb)
to various other formats.
WARNING: THE COMMANDLINE INTERFACE MAY CHANGE IN FUTURE RELEASES.
Options
=======
The options below are convenience aliases to configurable class-options,
as listed in the "Equivalent to" description-line of the aliases.
To see all configurable class-options for some <cmd>, use:
<cmd> --help-all
--debug
set log level to logging.DEBUG (maximize logging output)
Equivalent to: [--Application.log_level=10]
--show-config
Show the application's configuration (human-readable format)
Equivalent to: [--Application.show_config=True]
--show-config-json
Show the application's configuration (json format)
Equivalent to: [--Application.show_config_json=True]
--generate-config
generate default config file
Equivalent to: [--JupyterApp.generate_config=True]
-y
Answer yes to any questions instead of prompting.
Equivalent to: [--JupyterApp.answer_yes=True]
--execute
Execute the notebook prior to export.
Equivalent to: [--ExecutePreprocessor.enabled=True]
--allow-errors
Continue notebook execution even if one of the cells throws an error and include the error message in the cell output (the default behaviour is to abort conversion). This flag is only relevant if '--execute' was specified, too.
Equivalent to: [--ExecutePreprocessor.allow_errors=True]
--stdin
read a single notebook file from stdin. Write the resulting notebook with default basename 'notebook.*'
Equivalent to: [--NbConvertApp.from_stdin=True]
--stdout
Write notebook output to stdout instead of files.
Equivalent to: [--NbConvertApp.writer_class=StdoutWriter]
--inplace
Run nbconvert in place, overwriting the existing notebook (only
relevant when converting to notebook format)
Equivalent to: [--NbConvertApp.use_output_suffix=False --NbConvertApp.export_format=notebook --FilesWriter.build_directory=]
--clear-output
Clear output of current file and save in place,
overwriting the existing notebook.
Equivalent to: [--NbConvertApp.use_output_suffix=False --NbConvertApp.export_format=notebook --FilesWriter.build_directory= --ClearOutputPreprocessor.enabled=True]
--no-prompt
Exclude input and output prompts from converted document.
Equivalent to: [--TemplateExporter.exclude_input_prompt=True --TemplateExporter.exclude_output_prompt=True]
--no-input
Exclude input cells and output prompts from converted document.
This mode is ideal for generating code-free reports.
Equivalent to: [--TemplateExporter.exclude_output_prompt=True --TemplateExporter.exclude_input=True --TemplateExporter.exclude_input_prompt=True]
--allow-chromium-download
Whether to allow downloading chromium if no suitable version is found on the system.
Equivalent to: [--WebPDFExporter.allow_chromium_download=True]
--disable-chromium-sandbox
Disable chromium security sandbox when converting to PDF..
Equivalent to: [--WebPDFExporter.disable_sandbox=True]
--show-input
Shows code input. This flag is only useful for dejavu users.
Equivalent to: [--TemplateExporter.exclude_input=False]
--embed-images
Embed the images as base64 dataurls in the output. This flag is only useful for the HTML/WebPDF/Slides exports.
Equivalent to: [--HTMLExporter.embed_images=True]
--sanitize-html
Whether the HTML in Markdown cells and cell outputs should be sanitized..
Equivalent to: [--HTMLExporter.sanitize_html=True]
--log-level=<Enum>
Set the log level by value or name.
Choices: any of [0, 10, 20, 30, 40, 50, 'DEBUG', 'INFO', 'WARN', 'ERROR', 'CRITICAL']
Default: 30
Equivalent to: [--Application.log_level]
--config=<Unicode>
Full path of a config file.
Default: ''
Equivalent to: [--JupyterApp.config_file]
--to=<Unicode>
The export format to be used, either one of the built-in formats
['asciidoc', 'custom', 'html', 'latex', 'markdown', 'notebook', 'pdf', 'python', 'rst', 'script', 'slides', 'webpdf']
or a dotted object name that represents the import path for an
``Exporter`` class
Default: ''
Equivalent to: [--NbConvertApp.export_format]
--template=<Unicode>
Name of the template to use
Default: ''
Equivalent to: [--TemplateExporter.template_name]
--template-file=<Unicode>
Name of the template file to use
Default: None
Equivalent to: [--TemplateExporter.template_file]
--theme=<Unicode>
Template specific theme(e.g. the name of a JupyterLab CSS theme distributed
as prebuilt extension for the lab template)
Default: 'light'
Equivalent to: [--HTMLExporter.theme]
--sanitize_html=<Bool>
Whether the HTML in Markdown cells and cell outputs should be sanitized.This
should be set to True by nbviewer or similar tools.
Default: False
Equivalent to: [--HTMLExporter.sanitize_html]
--writer=<DottedObjectName>
Writer class used to write the
results of the conversion
Default: 'FilesWriter'
Equivalent to: [--NbConvertApp.writer_class]
--post=<DottedOrNone>
PostProcessor class used to write the
results of the conversion
Default: ''
Equivalent to: [--NbConvertApp.postprocessor_class]
--output=<Unicode>
overwrite base name use for output files.
can only be used when converting one notebook at a time.
Default: ''
Equivalent to: [--NbConvertApp.output_base]
--output-dir=<Unicode>
Directory to write output(s) to. Defaults
to output to the directory of each notebook. To recover
previous default behaviour (outputting to the current
working directory) use . as the flag value.
Default: ''
Equivalent to: [--FilesWriter.build_directory]
--reveal-prefix=<Unicode>
The URL prefix for reveal.js (version 3.x).
This defaults to the reveal CDN, but can be any url pointing to a copy
of reveal.js.
For speaker notes to work, this must be a relative path to a local
copy of reveal.js: e.g., "reveal.js".
If a relative path is given, it must be a subdirectory of the
current directory (from which the server is run).
See the usage documentation
(https://nbconvert.readthedocs.io/en/latest/usage.html#reveal-js-html-slideshow)
for more details.
Default: ''
Equivalent to: [--SlidesExporter.reveal_url_prefix]
--nbformat=<Enum>
The nbformat version to write.
Use this to downgrade notebooks.
Choices: any of [1, 2, 3, 4]
Default: 4
Equivalent to: [--NotebookExporter.nbformat_version]
Examples
--------
The simplest way to use nbconvert is
> jupyter nbconvert mynotebook.ipynb --to html
Options include ['asciidoc', 'custom', 'html', 'latex', 'markdown', 'notebook', 'pdf', 'python', 'rst', 'script', 'slides', 'webpdf'].
> jupyter nbconvert --to latex mynotebook.ipynb
Both HTML and LaTeX support multiple output templates. LaTeX includes
'base', 'article' and 'report'. HTML includes 'basic', 'lab' and
'classic'. You can specify the flavor of the format used.
> jupyter nbconvert --to html --template lab mynotebook.ipynb
You can also pipe the output to stdout, rather than a file
> jupyter nbconvert mynotebook.ipynb --stdout
PDF is generated via latex
> jupyter nbconvert mynotebook.ipynb --to pdf
You can get (and serve) a Reveal.js-powered slideshow
> jupyter nbconvert myslides.ipynb --to slides --post serve
Multiple notebooks can be given at the command line in a couple of
different ways:
> jupyter nbconvert notebook*.ipynb
> jupyter nbconvert notebook1.ipynb notebook2.ipynb
or you can specify the notebooks list in a config file, containing::
c.NbConvertApp.notebooks = ["my_notebook.ipynb"]
> jupyter nbconvert --config mycfg.py
To see all available configurables, use `--help-all`.
In [ ]:
from google.colab import files
uploaded = files.upload()
Saving RK_Fakenews.ipynb to RK_Fakenews.ipynb
In [ ]:
# Step 1: Upload the Jupyter Notebook file
from google.colab import files
uploaded = files.upload()
# Step 2: Convert the Jupyter Notebook to HTML
!jupyter nbconvert --to html RK_Fakenews.ipynb
# Step 3: Download the generated HTML file
from google.colab import files
files.download('RK_Fakenews.html')
Saving RK_Fakenews.ipynb to RK_Fakenews (1).ipynb [NbConvertApp] Converting notebook RK_Fakenews.ipynb to html [NbConvertApp] Writing 4455909 bytes to RK_Fakenews.html
Chapter 4: Results and Analysis-4.1 Description of Data Used
In [ ]:
# Step 1: Import necessary libraries
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
import networkx as nx
import numpy as np
# Step 2: Load the dataset
df = pd.read_csv('/content/fake.csv')
# Step 3: Display basic information about the dataset
print(df.head())
print(df.info())
print(df.describe())
# Visualize the distribution of subjects
plt.figure(figsize=(10, 6))
sns.countplot(x='subject', data=df)
plt.title('Distribution of Subjects in the Dataset')
plt.xlabel('Subject')
plt.ylabel('Count')
plt.show()
# Step 4: Experimental Setup - Splitting the Data and Feature Extraction
X = df['text'] # Using 'text' column for content
y = df['subject'] # Using 'subject' column as the target variable
# Encoding the target variable
label_encoder = LabelEncoder()
y_encoded = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.2, random_state=42)
# Feature Extraction using TF-IDF Vectorization
vectorizer = TfidfVectorizer(max_features=5000)
X_train_tfidf = vectorizer.fit_transform(X_train).toarray()
X_test_tfidf = vectorizer.transform(X_test).toarray()
# Display the shape of the TF-IDF matrices
print(f'TF-IDF Train shape: {X_train_tfidf.shape}')
print(f'TF-IDF Test shape: {X_test_tfidf.shape}')
# Step 5: Graph Construction
# Create an empty graph
G = nx.Graph()
# Add nodes (tweets) to the graph
for i, row in df.iterrows():
G.add_node(i, text=row['text'], subject=row['subject'])
# Assuming interaction data exists in the form of (user1, user2, interaction)
# For demonstration, let's assume we have interaction pairs (we'll simulate this part)
interaction_data = [(i, i+1) for i in range(len(df)-1)] # Simulated interaction pairs
# Add edges (interactions) to the graph
G.add_edges_from(interaction_data)
# Display the graph information
print(str(G))
# Step 6: Model Training and Evaluation
# Define the model
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
model = Sequential([
Dense(512, activation='relu', input_shape=(X_train_tfidf.shape[1],)),
Dropout(0.5),
Dense(256, activation='relu'),
Dropout(0.5),
Dense(len(label_encoder.classes_), activation='softmax')
])
# Compile the model
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# Train the model
history = model.fit(X_train_tfidf, y_train, epochs=10, batch_size=32, validation_split=0.2)
# Evaluate the model
loss, accuracy = model.evaluate(X_test_tfidf, y_test)
print(f'Test Accuracy: {accuracy:.4f}')
# Plotting the training history
plt.figure(figsize=(12, 6))
plt.plot(history.history['accuracy'], label='Train Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.title('Model Accuracy Over Epochs')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
# Detailed performance metrics
from sklearn.metrics import classification_report
y_pred = model.predict(X_test_tfidf)
y_pred_classes = np.argmax(y_pred, axis=1)
print(classification_report(y_test, y_pred_classes, target_names=label_encoder.classes_))
# Comparative Analysis with Random Forest
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
rf_model = RandomForestClassifier(n_estimators=100)
rf_model.fit(X_train_tfidf, y_train)
rf_y_pred = rf_model.predict(X_test_tfidf)
rf_accuracy = accuracy_score(y_test, rf_y_pred)
print(f'Random Forest Accuracy: {rf_accuracy:.4f}')
print("Random Forest Classification Report:")
print(classification_report(y_test, rf_y_pred, target_names=label_encoder.classes_))
# Comparing performance
print(f'Neural Network Model Accuracy: {accuracy:.4f}')
print(f'Random Forest Model Accuracy: {rf_accuracy:.4f}')
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
TF-IDF Train shape: (18784, 5000) TF-IDF Test shape: (4697, 5000) Graph with 23481 nodes and 23480 edges
/usr/local/lib/python3.10/dist-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(activity_regularizer=activity_regularizer, **kwargs)
Epoch 1/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 23s 44ms/step - accuracy: 0.5478 - loss: 1.0880 - val_accuracy: 0.6790 - val_loss: 0.6326 Epoch 2/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 43s 49ms/step - accuracy: 0.7103 - loss: 0.5640 - val_accuracy: 0.6676 - val_loss: 0.6323 Epoch 3/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 39s 46ms/step - accuracy: 0.7446 - loss: 0.5030 - val_accuracy: 0.6572 - val_loss: 0.6460 Epoch 4/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 42s 48ms/step - accuracy: 0.7556 - loss: 0.4611 - val_accuracy: 0.6524 - val_loss: 0.6833 Epoch 5/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 18s 39ms/step - accuracy: 0.7728 - loss: 0.4338 - val_accuracy: 0.6141 - val_loss: 0.7536 Epoch 6/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 20s 43ms/step - accuracy: 0.7911 - loss: 0.3975 - val_accuracy: 0.5864 - val_loss: 0.7744 Epoch 7/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 20s 43ms/step - accuracy: 0.7932 - loss: 0.3855 - val_accuracy: 0.5896 - val_loss: 0.9286 Epoch 8/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 23s 49ms/step - accuracy: 0.7982 - loss: 0.3749 - val_accuracy: 0.5760 - val_loss: 0.9560 Epoch 9/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 39s 45ms/step - accuracy: 0.8094 - loss: 0.3525 - val_accuracy: 0.5669 - val_loss: 1.0783 Epoch 10/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 40s 42ms/step - accuracy: 0.8030 - loss: 0.3429 - val_accuracy: 0.5800 - val_loss: 1.1588 147/147 ━━━━━━━━━━━━━━━━━━━━ 1s 7ms/step - accuracy: 0.5861 - loss: 1.1544 Test Accuracy: 0.5770
147/147 ━━━━━━━━━━━━━━━━━━━━ 1s 8ms/step precision recall f1-score support Government News 0.12 0.08 0.09 316 Middle-east 0.16 0.13 0.14 159 News 0.99 0.98 0.98 1821 US_News 0.27 0.32 0.29 160 left-news 0.18 0.14 0.16 897 politics 0.43 0.53 0.47 1344 accuracy 0.58 4697 macro avg 0.36 0.36 0.36 4697 weighted avg 0.56 0.58 0.57 4697 Random Forest Accuracy: 0.5382 Random Forest Classification Report: precision recall f1-score support Government News 0.01 0.01 0.01 316 Middle-east 0.10 0.10 0.10 159 News 0.96 0.99 0.98 1821 US_News 0.11 0.11 0.11 160 left-news 0.04 0.02 0.03 897 politics 0.38 0.49 0.43 1344 accuracy 0.54 4697 macro avg 0.27 0.29 0.28 4697 weighted avg 0.50 0.54 0.51 4697 Neural Network Model Accuracy: 0.5770 Random Forest Model Accuracy: 0.5382
In [ ]:
# Step 1: Import necessary libraries
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
import networkx as nx
import numpy as np
from sklearn.metrics import classification_report, confusion_matrix, ConfusionMatrixDisplay
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
# Step 2: Load the dataset
df = pd.read_csv('/content/fake.csv')
# Step 3: Display basic information about the dataset
print(df.head())
print(df.info())
print(df.describe())
# Visualize the distribution of subjects
plt.figure(figsize=(10, 6))
sns.countplot(x='subject', data=df)
plt.title('Distribution of Subjects in the Dataset')
plt.xlabel('Subject')
plt.ylabel('Count')
plt.show()
# Visualize the distribution of publication dates
df['date'] = pd.to_datetime(df['date'], errors='coerce') # Convert to datetime
plt.figure(figsize=(14, 7))
df['date'].value_counts().sort_index().plot()
plt.title('Distribution of Articles Over Time')
plt.xlabel('Date')
plt.ylabel('Count')
plt.show()
# Visualize the length of articles
df['text_length'] = df['text'].apply(len)
plt.figure(figsize=(10, 6))
sns.histplot(df['text_length'], bins=50, kde=True)
plt.title('Distribution of Article Lengths')
plt.xlabel('Text Length')
plt.ylabel('Count')
plt.show()
# Step 4: Experimental Setup - Splitting the Data and Feature Extraction
X = df['text'] # Using 'text' column for content
y = df['subject'] # Using 'subject' column as the target variable
# Encoding the target variable
label_encoder = LabelEncoder()
y_encoded = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.2, random_state=42)
# Feature Extraction using TF-IDF Vectorization
vectorizer = TfidfVectorizer(max_features=5000)
X_train_tfidf = vectorizer.fit_transform(X_train).toarray()
X_test_tfidf = vectorizer.transform(X_test).toarray()
# Display the shape of the TF-IDF matrices
print(f'TF-IDF Train shape: {X_train_tfidf.shape}')
print(f'TF-IDF Test shape: {X_test_tfidf.shape}')
# Step 5: Graph Construction and Fake News Identification
# Create an empty graph
G = nx.Graph()
# Define fake news subjects for identification
fake_news_labels = ['fake', 'false'] # Example labels for fake news
# Add nodes (articles) to the graph
for i, row in df.iterrows():
is_fake = row['subject'].lower() in fake_news_labels
G.add_node(i, text=row['text'], subject=row['subject'], fake=is_fake)
# Assuming interaction data exists in the form of (user1, user2, interaction)
# For demonstration, let's assume we have interaction pairs (we'll simulate this part)
interaction_data = [(i, i+1) for i in range(len(df)-1)] # Simulated interaction pairs
# Add edges (interactions) to the graph
G.add_edges_from(interaction_data)
# Display the graph information
print(f"Number of nodes: {G.number_of_nodes()}")
print(f"Number of edges: {G.number_of_edges()}")
print(f"Graph nodes: {list(G.nodes(data=True))[:5]}") # Displaying first 5 nodes with data
print(f"Graph edges: {list(G.edges())[:5]}") # Displaying first 5 edges
# Draw the graph with fake news highlighted
plt.figure(figsize=(15, 10))
pos = nx.spring_layout(G)
# Color nodes based on fake news status
node_color = ['red' if G.nodes[node]['fake'] else 'green' for node in G.nodes]
nx.draw(G, pos, with_labels=False, node_size=50, node_color=node_color, edge_color='gray')
red_patch = plt.Line2D([0], [0], marker='o', color='w', label='Fake News', markersize=10, markerfacecolor='red')
green_patch = plt.Line2D([0], [0], marker='o', color='w', label='Real News', markersize=10, markerfacecolor='green')
plt.legend(handles=[red_patch, green_patch])
plt.title('Graph Representation with Fake News Highlighted')
plt.show()
# Step 6: Model Training and Evaluation
# Define the model
model = Sequential([
Dense(512, activation='relu', input_shape=(X_train_tfidf.shape[1],)),
Dropout(0.5),
Dense(256, activation='relu'),
Dropout(0.5),
Dense(len(label_encoder.classes_), activation='softmax')
])
# Compile the model
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# Train the model
history = model.fit(X_train_tfidf, y_train, epochs=10, batch_size=32, validation_split=0.2)
# Evaluate the model
loss, accuracy = model.evaluate(X_test_tfidf, y_test)
print(f'Test Accuracy: {accuracy:.4f}')
# Plotting the training history
plt.figure(figsize=(12, 6))
plt.plot(history.history['accuracy'], label='Train Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.title('Model Accuracy Over Epochs')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
# Detailed performance metrics
y_pred = model.predict(X_test_tfidf)
y_pred_classes = np.argmax(y_pred, axis=1)
print(classification_report(y_test, y_pred_classes, target_names=label_encoder.classes_))
# Confusion Matrix for Neural Network
cm_nn = confusion_matrix(y_test, y_pred_classes)
cmd_nn = ConfusionMatrixDisplay(cm_nn, display_labels=label_encoder.classes_)
cmd_nn.plot()
plt.title('Confusion Matrix for Neural Network Model')
plt.show()
# Comparative Analysis with Random Forest
rf_model = RandomForestClassifier(n_estimators=100)
rf_model.fit(X_train_tfidf, y_train)
rf_y_pred = rf_model.predict(X_test_tfidf)
rf_accuracy = accuracy_score(y_test, rf_y_pred)
print(f'Random Forest Accuracy: {rf_accuracy:.4f}')
print("Random Forest Classification Report:")
print(classification_report(y_test, rf_y_pred, target_names=label_encoder.classes_))
# Confusion Matrix for Random Forest
cm_rf = confusion_matrix(y_test, rf_y_pred)
cmd_rf = ConfusionMatrixDisplay(cm_rf, display_labels=label_encoder.classes_)
cmd_rf.plot()
plt.title('Confusion Matrix for Random Forest Model')
plt.show()
# Comparing performance
print(f'Neural Network Model Accuracy: {accuracy:.4f}')
print(f'Random Forest Model Accuracy: {rf_accuracy:.4f}')
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
TF-IDF Train shape: (18784, 5000)
TF-IDF Test shape: (4697, 5000)
Number of nodes: 23481
Number of edges: 23480
Graph nodes: [(0, {'text': 'Donald Trump just couldn t wish all Americans a Happy New Year and leave it at that. Instead, he had to give a shout out to his enemies, haters and the very dishonest fake news media. The former reality show star had just one job to do and he couldn t do it. As our Country rapidly grows stronger and smarter, I want to wish all of my friends, supporters, enemies, haters, and even the very dishonest Fake News Media, a Happy and Healthy New Year, President Angry Pants tweeted. 2018 will be a great year for America! As our Country rapidly grows stronger and smarter, I want to wish all of my friends, supporters, enemies, haters, and even the very dishonest Fake News Media, a Happy and Healthy New Year. 2018 will be a great year for America! Donald J. Trump (@realDonaldTrump) December 31, 2017Trump s tweet went down about as welll as you d expect.What kind of president sends a New Year s greeting like this despicable, petty, infantile gibberish? Only Trump! His lack of decency won t even allow him to rise above the gutter long enough to wish the American citizens a happy new year! Bishop Talbert Swan (@TalbertSwan) December 31, 2017no one likes you Calvin (@calvinstowell) December 31, 2017Your impeachment would make 2018 a great year for America, but I ll also accept regaining control of Congress. Miranda Yaver (@mirandayaver) December 31, 2017Do you hear yourself talk? When you have to include that many people that hate you you have to wonder? Why do the they all hate me? Alan Sandoval (@AlanSandoval13) December 31, 2017Who uses the word Haters in a New Years wish?? Marlene (@marlene399) December 31, 2017You can t just say happy new year? Koren pollitt (@Korencarpenter) December 31, 2017Here s Trump s New Year s Eve tweet from 2016.Happy New Year to all, including to my many enemies and those who have fought me and lost so badly they just don t know what to do. Love! Donald J. Trump (@realDonaldTrump) December 31, 2016This is nothing new for Trump. He s been doing this for years.Trump has directed messages to his enemies and haters for New Year s, Easter, Thanksgiving, and the anniversary of 9/11. pic.twitter.com/4FPAe2KypA Daniel Dale (@ddale8) December 31, 2017Trump s holiday tweets are clearly not presidential.How long did he work at Hallmark before becoming President? Steven Goodine (@SGoodine) December 31, 2017He s always been like this . . . the only difference is that in the last few years, his filter has been breaking down. Roy Schulze (@thbthttt) December 31, 2017Who, apart from a teenager uses the term haters? Wendy (@WendyWhistles) December 31, 2017he s a fucking 5 year old Who Knows (@rainyday80) December 31, 2017So, to all the people who voted for this a hole thinking he would change once he got into power, you were wrong! 70-year-old men don t change and now he s a year older.Photo by Andrew Burton/Getty Images.', 'subject': 'News', 'fake': False}), (1, {'text': 'House Intelligence Committee Chairman Devin Nunes is going to have a bad day. He s been under the assumption, like many of us, that the Christopher Steele-dossier was what prompted the Russia investigation so he s been lashing out at the Department of Justice and the FBI in order to protect Trump. As it happens, the dossier is not what started the investigation, according to documents obtained by the New York Times.Former Trump campaign adviser George Papadopoulos was drunk in a wine bar when he revealed knowledge of Russian opposition research on Hillary Clinton.On top of that, Papadopoulos wasn t just a covfefe boy for Trump, as his administration has alleged. He had a much larger role, but none so damning as being a drunken fool in a wine bar. Coffee boys don t help to arrange a New York meeting between Trump and President Abdel Fattah el-Sisi of Egypt two months before the election. It was known before that the former aide set up meetings with world leaders for Trump, but team Trump ran with him being merely a coffee boy.In May 2016, Papadopoulos revealed to Australian diplomat Alexander Downer that Russian officials were shopping around possible dirt on then-Democratic presidential nominee Hillary Clinton. Exactly how much Mr. Papadopoulos said that night at the Kensington Wine Rooms with the Australian, Alexander Downer, is unclear, the report states. But two months later, when leaked Democratic emails began appearing online, Australian officials passed the information about Mr. Papadopoulos to their American counterparts, according to four current and former American and foreign officials with direct knowledge of the Australians role. Papadopoulos pleaded guilty to lying to the F.B.I. and is now a cooperating witness with Special Counsel Robert Mueller s team.This isn t a presidency. It s a badly scripted reality TV show.Photo by Win McNamee/Getty Images.', 'subject': 'News', 'fake': False}), (2, {'text': 'On Friday, it was revealed that former Milwaukee Sheriff David Clarke, who was being considered for Homeland Security Secretary in Donald Trump s administration, has an email scandal of his own.In January, there was a brief run-in on a plane between Clarke and fellow passenger Dan Black, who he later had detained by the police for no reason whatsoever, except that maybe his feelings were hurt. Clarke messaged the police to stop Black after he deplaned, and now, a search warrant has been executed by the FBI to see the exchanges.Clarke is calling it fake news even though copies of the search warrant are on the Internet. I am UNINTIMIDATED by lib media attempts to smear and discredit me with their FAKE NEWS reports designed to silence me, the former sheriff tweeted. I will continue to poke them in the eye with a sharp stick and bitch slap these scum bags til they get it. I have been attacked by better people than them #MAGA I am UNINTIMIDATED by lib media attempts to smear and discredit me with their FAKE NEWS reports designed to silence me. I will continue to poke them in the eye with a sharp stick and bitch slap these scum bags til they get it. I have been attacked by better people than them #MAGA pic.twitter.com/XtZW5PdU2b David A. Clarke, Jr. (@SheriffClarke) December 30, 2017He didn t stop there.BREAKING NEWS! When LYING LIB MEDIA makes up FAKE NEWS to smear me, the ANTIDOTE is go right at them. Punch them in the nose & MAKE THEM TASTE THEIR OWN BLOOD. Nothing gets a bully like LYING LIB MEDIA S attention better than to give them a taste of their own blood #neverbackdown pic.twitter.com/T2NY2psHCR David A. Clarke, Jr. (@SheriffClarke) December 30, 2017The internet called him out.This is your local newspaper and that search warrant isn t fake, and just because the chose not to file charges at the time doesn t mean they won t! Especially if you continue to lie. Months after decision not to charge Clarke, email search warrant filed https://t.co/zcbyc4Wp5b KeithLeBlanc (@KeithLeBlanc63) December 30, 2017I just hope the rest of the Village People aren t implicated. Kirk Ketchum (@kirkketchum) December 30, 2017Slaw, baked potatoes, or French fries? pic.twitter.com/fWfXsZupxy ALT- Immigration (@ALT_uscis) December 30, 2017pic.twitter.com/ymsOBLjfxU Pendulum Swinger (@PendulumSwngr) December 30, 2017you called your police friends to stand up for you when someone made fun of your hat Chris Jackson (@ChrisCJackson) December 30, 2017Is it me, with this masterful pshop of your hat, which I seem to never tire of. I think it s the steely resolve in your one visible eye pic.twitter.com/dWr5k8ZEZV Chris Mohney (@chrismohney) December 30, 2017Are you indicating with your fingers how many people died in your jail? I think you re a few fingers short, dipshit Ike Barinholtz (@ikebarinholtz) December 30, 2017ROFL. Internet tough guy with fake flair. pic.twitter.com/ulCFddhkdy KellMeCrazy (@Kel_MoonFace) December 30, 2017You re so edgy, buddy. Mrs. SMH (@MRSSMH2) December 30, 2017Is his break over at Applebees? Aaron (@feltrrr2) December 30, 2017Are you trying to earn your still relevant badge? CircusRebel (@CircusDrew) December 30, 2017make sure to hydrate, drink lots of water. It s rumored that prisoners can be denied water by prison officials. Robert Klinc (@RobertKlinc1) December 30, 2017Terrill Thomas, the 38-year-old black man who died of thirst in Clarke s Milwaukee County Jail cell this April, was a victim of homicide. We just thought we should point that out. It can t be repeated enough.Photo by Spencer Platt/Getty Images.', 'subject': 'News', 'fake': False}), (3, {'text': 'On Christmas day, Donald Trump announced that he would be back to work the following day, but he is golfing for the fourth day in a row. The former reality show star blasted former President Barack Obama for playing golf and now Trump is on track to outpace the number of golf games his predecessor played.Updated my tracker of Trump s appearances at Trump properties.71 rounds of golf including today s. At this pace, he ll pass Obama s first-term total by July 24 next year. https://t.co/Fg7VacxRtJ pic.twitter.com/5gEMcjQTbH Philip Bump (@pbump) December 29, 2017 That makes what a Washington Post reporter discovered on Trump s website really weird, but everything about this administration is bizarre AF. The coding contained a reference to Obama and golf: Unlike Obama, we are working to fix the problem and not on the golf course. However, the coding wasn t done correctly.The website of Donald Trump, who has spent several days in a row at the golf course, is coded to serve up the following message in the event of an internal server error: https://t.co/zrWpyMXRcz pic.twitter.com/wiQSQNNzw0 Christopher Ingraham (@_cingraham) December 28, 2017That snippet of code appears to be on all https://t.co/dkhw0AlHB4 pages, which the footer says is paid for by the RNC? pic.twitter.com/oaZDT126B3 Christopher Ingraham (@_cingraham) December 28, 2017It s also all over https://t.co/ayBlGmk65Z. As others have noted in this thread, this is weird code and it s not clear it would ever actually display, but who knows. Christopher Ingraham (@_cingraham) December 28, 2017After the coding was called out, the reference to Obama was deleted.UPDATE: The golf error message has been removed from the Trump and GOP websites. They also fixed the javascript = vs == problem. Still not clear when these messages would actually display, since the actual 404 (and presumably 500) page displays a different message pic.twitter.com/Z7dmyQ5smy Christopher Ingraham (@_cingraham) December 29, 2017That suggests someone at either RNC or the Trump admin is sensitive enough to Trump s golf problem to make this issue go away quickly once people noticed. You have no idea how much I d love to see the email exchange that led us here. Christopher Ingraham (@_cingraham) December 29, 2017 The code was f-cked up.The best part about this is that they are using the = (assignment) operator which means that bit of code will never get run. If you look a few lines up errorCode will always be 404 (@tw1trsux) December 28, 2017trump s coders can t code. Nobody is surprised. Tim Peterson (@timrpeterson) December 28, 2017Donald Trump is obsessed with Obama that his name was even in the coding of his website while he played golf again.Photo by Joe Raedle/Getty Images.', 'subject': 'News', 'fake': False}), (4, {'text': 'Pope Francis used his annual Christmas Day message to rebuke Donald Trump without even mentioning his name. The Pope delivered his message just days after members of the United Nations condemned Trump s move to recognize Jerusalem as the capital of Israel. The Pontiff prayed on Monday for the peaceful coexistence of two states within mutually agreed and internationally recognized borders. We see Jesus in the children of the Middle East who continue to suffer because of growing tensions between Israelis and Palestinians, Francis said. On this festive day, let us ask the Lord for peace for Jerusalem and for all the Holy Land. Let us pray that the will to resume dialogue may prevail between the parties and that a negotiated solution can finally be reached. The Pope went on to plead for acceptance of refugees who have been forced from their homes, and that is an issue Trump continues to fight against. Francis used Jesus for which there was no place in the inn as an analogy. Today, as the winds of war are blowing in our world and an outdated model of development continues to produce human, societal and environmental decline, Christmas invites us to focus on the sign of the Child and to recognize him in the faces of little children, especially those for whom, like Jesus, there is no place in the inn, he said. Jesus knows well the pain of not being welcomed and how hard it is not to have a place to lay one s head, he added. May our hearts not be closed as they were in the homes of Bethlehem. The Pope said that Mary and Joseph were immigrants who struggled to find a safe place to stay in Bethlehem. They had to leave their people, their home, and their land, Francis said. This was no comfortable or easy journey for a young couple about to have a child. At heart, they were full of hope and expectation because of the child about to be born; yet their steps were weighed down by the uncertainties and dangers that attend those who have to leave their home behind. So many other footsteps are hidden in the footsteps of Joseph and Mary, Francis said Sunday. We see the tracks of entire families forced to set out in our own day. We see the tracks of millions of persons who do not choose to go away, but driven from their land, leave behind their dear ones. Amen to that.Photo by Christopher Furlong/Getty Images.', 'subject': 'News', 'fake': False})]
Graph edges: [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5)]
/usr/local/lib/python3.10/dist-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(activity_regularizer=activity_regularizer, **kwargs)
Epoch 1/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 24s 46ms/step - accuracy: 0.5544 - loss: 1.0763 - val_accuracy: 0.6888 - val_loss: 0.6310 Epoch 2/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 39s 42ms/step - accuracy: 0.7124 - loss: 0.5673 - val_accuracy: 0.6689 - val_loss: 0.6236 Epoch 3/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 23s 50ms/step - accuracy: 0.7345 - loss: 0.5071 - val_accuracy: 0.6439 - val_loss: 0.6708 Epoch 4/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 21s 44ms/step - accuracy: 0.7477 - loss: 0.4768 - val_accuracy: 0.6130 - val_loss: 0.6984 Epoch 5/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 41s 43ms/step - accuracy: 0.7670 - loss: 0.4359 - val_accuracy: 0.6154 - val_loss: 0.7341 Epoch 6/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 22s 47ms/step - accuracy: 0.7776 - loss: 0.4142 - val_accuracy: 0.5925 - val_loss: 0.7938 Epoch 7/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 20s 42ms/step - accuracy: 0.7865 - loss: 0.3914 - val_accuracy: 0.5837 - val_loss: 0.9019 Epoch 8/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 20s 41ms/step - accuracy: 0.7980 - loss: 0.3763 - val_accuracy: 0.5776 - val_loss: 0.9586 Epoch 9/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 21s 43ms/step - accuracy: 0.8018 - loss: 0.3585 - val_accuracy: 0.5691 - val_loss: 1.0940 Epoch 10/10 470/470 ━━━━━━━━━━━━━━━━━━━━ 21s 44ms/step - accuracy: 0.8080 - loss: 0.3453 - val_accuracy: 0.5640 - val_loss: 1.1431 147/147 ━━━━━━━━━━━━━━━━━━━━ 1s 8ms/step - accuracy: 0.5739 - loss: 1.1444 Test Accuracy: 0.5614
147/147 ━━━━━━━━━━━━━━━━━━━━ 1s 8ms/step precision recall f1-score support Government News 0.11 0.12 0.11 316 Middle-east 0.10 0.04 0.06 159 News 0.98 0.98 0.98 1821 US_News 0.38 0.58 0.46 160 left-news 0.19 0.19 0.19 897 politics 0.40 0.40 0.40 1344 accuracy 0.56 4697 macro avg 0.36 0.39 0.37 4697 weighted avg 0.56 0.56 0.56 4697
Random Forest Accuracy: 0.5359
Random Forest Classification Report:
precision recall f1-score support
Government News 0.01 0.01 0.01 316
Middle-east 0.08 0.08 0.08 159
News 0.96 0.99 0.98 1821
US_News 0.13 0.14 0.13 160
left-news 0.03 0.02 0.03 897
politics 0.38 0.49 0.43 1344
accuracy 0.54 4697
macro avg 0.27 0.29 0.28 4697
weighted avg 0.50 0.54 0.51 4697
Neural Network Model Accuracy: 0.5614 Random Forest Model Accuracy: 0.5359
In [ ]:
# Step 1: Upload the Jupyter Notebook
from google.colab import files
uploaded = files.upload()
# List uploaded files to confirm the upload
import os
print(os.listdir())
# Step 2: Convert the notebook to HTML
!jupyter nbconvert --to html RK_Fakenews.ipynb
# Step 3: Ensure the HTML file is responsive
from bs4 import BeautifulSoup
# Read the HTML file
html_file_path = 'RK_Fakenews.html'
with open(html_file_path, 'r') as file:
soup = BeautifulSoup(file, 'html.parser')
# Add responsive meta tag if not already present
if not soup.find('meta', attrs={'name': 'viewport'}):
meta_tag = soup.new_tag('meta', name='viewport', content='width=device-width, initial-scale=1.0')
soup.head.append(meta_tag)
# Write the modified HTML file
with open(html_file_path, 'w') as file:
file.write(str(soup))
print('Responsive meta tag added to the HTML file.')
# Step 4: Download the HTML file
files.download(html_file_path)
Saving RK_Fakenews.ipynb to RK_Fakenews.ipynb ['.config', 'drive', 'RK_Fakenews.ipynb', 'fake.csv', 'sample_data'] [NbConvertApp] Converting notebook RK_Fakenews.ipynb to html [NbConvertApp] Writing 4157352 bytes to RK_Fakenews.html Responsive meta tag added to the HTML file.
explination report writing
In [ ]:
# Step 1: Import Necessary Libraries
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Step 2: Load the Dataset
df = pd.read_csv('/content/fake.csv')
# Step 3: Display Basic Information about the Dataset
# Display the first few rows of the dataset
print("First few rows of the dataset:")
print(df.head())
# Display information about the dataset
print("\nInformation about the dataset:")
print(df.info())
# Display statistical summary of the dataset
print("\nStatistical summary of the dataset:")
print(df.describe())
# Step 4: Check the Total Number of Records in the Dataset
# Total number of records in the dataset
total_records = len(df)
print(f"\nTotal number of records in the dataset: {total_records}")
# Step 5: Visualize the Distribution of Subjects
# Plot the distribution of subjects
plt.figure(figsize=(10, 6))
sns.countplot(x='subject', data=df)
plt.title('Distribution of Subjects in the Dataset')
plt.xlabel('Subject')
plt.ylabel('Count')
plt.show()
Total number of records in the dataset: 23481
title \
0 Donald Trump Sends Out Embarrassing New Year’...
1 Drunk Bragging Trump Staffer Started Russian ...
2 Sheriff David Clarke Becomes An Internet Joke...
3 Trump Is So Obsessed He Even Has Obama’s Name...
4 Pope Francis Just Called Out Donald Trump Dur...
text subject \
0 Donald Trump just couldn t wish all Americans ... News
1 House Intelligence Committee Chairman Devin Nu... News
2 On Friday, it was revealed that former Milwauk... News
3 On Christmas day, Donald Trump announced that ... News
4 Pope Francis used his annual Christmas Day mes... News
date
0 December 31, 2017
1 December 31, 2017
2 December 30, 2017
3 December 29, 2017
4 December 25, 2017
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 23481 entries, 0 to 23480
Data columns (total 4 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 title 23481 non-null object
1 text 23481 non-null object
2 subject 23481 non-null object
3 date 23481 non-null object
dtypes: object(4)
memory usage: 733.9+ KB
None
title text subject \
count 23481 23481 23481
unique 17903 17455 6
top MEDIA IGNORES Time That Bill Clinton FIRED His... News
freq 6 626 9050
date
count 23481
unique 1681
top May 10, 2017
freq 46
4.2 experimintal setup
In [ ]:
X = df['text'] # Using 'text' column for content
y = df['subject'] # Using 'subject' column as the target variable
# Encoding the target variable
label_encoder = LabelEncoder()
y_encoded = label_encoder.fit_transform(y)
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.2, random_state=42)
In [ ]:
vectorizer = TfidfVectorizer(max_features=5000)
X_train_tfidf = vectorizer.fit_transform(X_train).toarray()
X_test_tfidf = vectorizer.transform(X_test).toarray()
# Display the shape of the TF-IDF matrices
print(f'TF-IDF Train shape: {X_train_tfidf.shape}')
print(f'TF-IDF Test shape: {X_test_tfidf.shape}')
TF-IDF Train shape: (18784, 5000) TF-IDF Test shape: (4697, 5000)
In [ ]:
import networkx as nx
import pandas as pd
import matplotlib.pyplot as plt
# Step 1: Creating an Empty Graph
G = nx.Graph()
# Step 2: Adding Nodes (Tweets) to the Graph
df = pd.read_csv('/content/fake.csv')
assert 'text' in df.columns, "Dataset must contain 'text' column"
assert 'subject' in df.columns, "Dataset must contain 'subject' column"
for index, row in df.iterrows():
G.add_node(index, text=row['text'], subject=row['subject'])
# Step 3: Adding Edges (Interactions) to the Graph
for i in range(len(df) - 1):
G.add_edge(i, i + 1)
# Step 4: Display the Graph Information
# Display the number of nodes and edges
print(f"Number of nodes: {G.number_of_nodes()}")
print(f"Number of edges: {G.number_of_edges()}")
# Optionally, you can use matplotlib to visualize the graph
plt.figure(figsize=(10, 7))
nx.draw(G, with_labels=True, node_size=500, node_color='lightblue', font_size=10, font_weight='bold', edge_color='gray')
plt.title("Graph Visualization")
plt.show()
Number of nodes: 23481 Number of edges: 23480
model evaluation
In [ ]:
import pandas as pd
import tensorflow as tf
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report, confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
# Load the dataset with a chunk size to handle large files
chunk_size = 10000 # Adjust based on available memory
chunks = pd.read_csv('/content/fake.csv', chunksize=chunk_size)
# Concatenate all chunks into a single DataFrame
data = pd.concat(chunks, ignore_index=True)
# Ensure data is properly loaded and handle missing values
data = data.dropna(subset=['text', 'subject'])
# Process text data
vectorizer = TfidfVectorizer(max_features=10000) # Limit the number of features
X = vectorizer.fit_transform(data['text']).toarray()
# Encode labels
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(data['subject'])
# Split the dataset into training, validation, and test sets
X_train, X_temp, y_train, y_temp = train_test_split(X, y, test_size=0.4, random_state=42)
X_val, X_test, y_val, y_test = train_test_split(X_temp, y_temp, test_size=0.5, random_state=42)
# Define the input shape and number of classes based on your data
input_shape = X_train.shape[1] # Number of features
num_classes = len(np.unique(y_train)) # Number of classes
# Define and compile the model
model = tf.keras.Sequential([
tf.keras.layers.Dense(128, activation='relu', input_shape=(input_shape,)),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(64, activation='relu'),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(num_classes, activation='softmax')
])
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
# Train the model
history = model.fit(X_train, y_train,
epochs=10,
batch_size=32,
validation_data=(X_val, y_val))
# Evaluate the model
test_loss, test_accuracy = model.evaluate(X_test, y_test)
print(f"Test Accuracy: {test_accuracy:.2f}")
# Plot the training history
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
plt.plot(history.history['accuracy'], label='Training Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.title('Model Accuracy')
plt.subplot(1, 2, 2)
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.title('Model Loss')
plt.tight_layout()
plt.show()
# Generate predictions and evaluate
test_predictions = model.predict(X_test)
test_predictions_classes = np.argmax(test_predictions, axis=1)
report = classification_report(y_test, test_predictions_classes, target_names=label_encoder.classes_)
print("Classification Report:")
print(report)
conf_matrix = confusion_matrix(y_test, test_predictions_classes)
plt.figure(figsize=(10, 7))
sns.heatmap(conf_matrix, annot=True, fmt='d', cmap='Blues', xticklabels=label_encoder.classes_, yticklabels=label_encoder.classes_)
plt.xlabel('Predicted Label')
plt.ylabel('True Label')
plt.title('Confusion Matrix')
plt.show()
/usr/local/lib/python3.10/dist-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(activity_regularizer=activity_regularizer, **kwargs)
Epoch 1/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 14s 26ms/step - accuracy: 0.4710 - loss: 1.3259 - val_accuracy: 0.6885 - val_loss: 0.6983 Epoch 2/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 12s 28ms/step - accuracy: 0.6897 - loss: 0.6594 - val_accuracy: 0.6831 - val_loss: 0.6244 Epoch 3/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 20s 26ms/step - accuracy: 0.7175 - loss: 0.5654 - val_accuracy: 0.6808 - val_loss: 0.6342 Epoch 4/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 21s 27ms/step - accuracy: 0.7373 - loss: 0.5120 - val_accuracy: 0.6335 - val_loss: 0.6711 Epoch 5/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 11s 26ms/step - accuracy: 0.7582 - loss: 0.4752 - val_accuracy: 0.6290 - val_loss: 0.7073 Epoch 6/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 10s 23ms/step - accuracy: 0.7810 - loss: 0.4352 - val_accuracy: 0.6078 - val_loss: 0.7462 Epoch 7/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 12s 27ms/step - accuracy: 0.7980 - loss: 0.4104 - val_accuracy: 0.5928 - val_loss: 0.8048 Epoch 8/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 12s 27ms/step - accuracy: 0.8002 - loss: 0.3933 - val_accuracy: 0.6101 - val_loss: 0.8967 Epoch 9/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 13s 29ms/step - accuracy: 0.7993 - loss: 0.3897 - val_accuracy: 0.5862 - val_loss: 0.9353 Epoch 10/10 441/441 ━━━━━━━━━━━━━━━━━━━━ 21s 29ms/step - accuracy: 0.8134 - loss: 0.3684 - val_accuracy: 0.5756 - val_loss: 0.9766 147/147 ━━━━━━━━━━━━━━━━━━━━ 1s 6ms/step - accuracy: 0.5740 - loss: 0.9902 Test Accuracy: 0.58
147/147 ━━━━━━━━━━━━━━━━━━━━ 1s 9ms/step Classification Report: precision recall f1-score support Government News 0.16 0.16 0.16 327 Middle-east 0.35 0.26 0.30 149 News 0.99 0.97 0.98 1828 US_News 0.42 0.53 0.46 150 left-news 0.21 0.20 0.20 880 politics 0.42 0.44 0.43 1363 accuracy 0.58 4697 macro avg 0.42 0.43 0.42 4697 weighted avg 0.58 0.58 0.58 4697
4.4 compirative analysis section
In [ ]:
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns
import tensorflow as tf
# Load the dataset
data = pd.read_csv('/content/fake.csv')
# Ensure the dataset has the required columns and preprocess the text
vectorizer = TfidfVectorizer(max_features=5000) # Limit number of features to reduce memory usage
X = vectorizer.fit_transform(data['text']).toarray() # Convert text to feature vectors
# Encode labels
label_encoder = LabelEncoder()
y = label_encoder.fit_transform(data['subject'])
# Split the dataset into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)
# 1. Training a Random Forest Classifier
rf_model = RandomForestClassifier(n_estimators=100, random_state=42, n_jobs=-1) # Use all cores for training
rf_model.fit(X_train, y_train)
# Make predictions and evaluate accuracy
rf_y_pred = rf_model.predict(X_test)
rf_accuracy = accuracy_score(y_test, rf_y_pred)
print(f'Random Forest Accuracy: {rf_accuracy:.4f}')
# 2. Detailed Performance Metrics for Random Forest
print("Random Forest Classification Report:")
print(classification_report(y_test, rf_y_pred, target_names=label_encoder.classes_))
# Generate and plot confusion matrix
cm_rf = confusion_matrix(y_test, rf_y_pred)
plt.figure(figsize=(10, 8))
sns.heatmap(cm_rf, annot=True, fmt='d', xticklabels=label_encoder.classes_, yticklabels=label_encoder.classes_)
plt.title('Confusion Matrix for Random Forest Classifier')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()
# 3. Training a Neural Network Model (For comparison)
# Define the neural network model
input_shape = X_train.shape[1] # Number of features in the dataset
num_classes = len(np.unique(y_train)) # Number of unique classes
model = tf.keras.Sequential([
tf.keras.layers.Input(shape=(input_shape,)), # Use Input layer for defining input shape
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(64, activation='relu'),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(num_classes, activation='softmax')
])
# Compile the model
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
# Train the model
history = model.fit(X_train, y_train, epochs=5, batch_size=32, validation_split=0.2, verbose=1) # Reduced epochs for efficiency
# Evaluate the model on the test set
test_loss, test_accuracy = model.evaluate(X_test, y_test, verbose=1)
print(f'Neural Network Test Accuracy: {test_accuracy:.4f}')
# Plot the training history for the Neural Network
plt.figure(figsize=(12, 6))
plt.subplot(1, 2, 1)
plt.plot(history.history['accuracy'], label='Training Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.title('Neural Network Model Accuracy')
plt.subplot(1, 2, 2)
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.title('Neural Network Model Loss')
plt.tight_layout()
plt.show()
# Generate predictions for the Neural Network model
nn_predictions = model.predict(X_test, verbose=1)
nn_predictions_classes = np.argmax(nn_predictions, axis=1)
# Generate the classification report for the Neural Network model
nn_report = classification_report(y_test, nn_predictions_classes, target_names=label_encoder.classes_)
print("Neural Network Classification Report:")
print(nn_report)
# Generate and plot confusion matrix for Neural Network model
nn_conf_matrix = confusion_matrix(y_test, nn_predictions_classes)
plt.figure(figsize=(10, 8))
sns.heatmap(nn_conf_matrix, annot=True, fmt='d', xticklabels=label_encoder.classes_, yticklabels=label_encoder.classes_)
plt.title('Confusion Matrix for Neural Network Classifier')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()
Random Forest Accuracy: 0.5704
Random Forest Classification Report:
precision recall f1-score support
Government News 0.01 0.00 0.01 629
Middle-east 0.16 0.15 0.15 310
News 0.95 0.99 0.97 3642
US_News 0.19 0.20 0.19 305
left-news 0.08 0.05 0.06 1782
politics 0.43 0.57 0.49 2725
accuracy 0.57 9393
macro avg 0.30 0.33 0.31 9393
weighted avg 0.52 0.57 0.54 9393
Epoch 1/5 353/353 ━━━━━━━━━━━━━━━━━━━━ 7s 15ms/step - accuracy: 0.4580 - loss: 1.3544 - val_accuracy: 0.6767 - val_loss: 0.7146 Epoch 2/5 353/353 ━━━━━━━━━━━━━━━━━━━━ 6s 16ms/step - accuracy: 0.6725 - loss: 0.6972 - val_accuracy: 0.6824 - val_loss: 0.6362 Epoch 3/5 353/353 ━━━━━━━━━━━━━━━━━━━━ 5s 13ms/step - accuracy: 0.7086 - loss: 0.5921 - val_accuracy: 0.6831 - val_loss: 0.6262 Epoch 4/5 353/353 ━━━━━━━━━━━━━━━━━━━━ 6s 16ms/step - accuracy: 0.7294 - loss: 0.5372 - val_accuracy: 0.6618 - val_loss: 0.6417 Epoch 5/5 353/353 ━━━━━━━━━━━━━━━━━━━━ 7s 19ms/step - accuracy: 0.7522 - loss: 0.4950 - val_accuracy: 0.6448 - val_loss: 0.6732 294/294 ━━━━━━━━━━━━━━━━━━━━ 1s 3ms/step - accuracy: 0.6603 - loss: 0.6617 Neural Network Test Accuracy: 0.6491
294/294 ━━━━━━━━━━━━━━━━━━━━ 1s 3ms/step Neural Network Classification Report: precision recall f1-score support Government News 0.17 0.07 0.10 629 Middle-east 0.00 0.00 0.00 310 News 0.98 0.97 0.97 3642 US_News 0.49 0.97 0.65 305 left-news 0.31 0.19 0.24 1782 politics 0.49 0.69 0.58 2725 accuracy 0.65 9393 macro avg 0.41 0.48 0.42 9393 weighted avg 0.61 0.65 0.62 9393
/usr/local/lib/python3.10/dist-packages/sklearn/metrics/_classification.py:1471: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior. _warn_prf(average, modifier, msg_start, len(result)) /usr/local/lib/python3.10/dist-packages/sklearn/metrics/_classification.py:1471: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior. _warn_prf(average, modifier, msg_start, len(result)) /usr/local/lib/python3.10/dist-packages/sklearn/metrics/_classification.py:1471: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior. _warn_prf(average, modifier, msg_start, len(result))
In [ ]:
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans
from sklearn.metrics import confusion_matrix, classification_report, accuracy_score
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.utils import plot_model
# ---- Neural Network Architecture Diagram ----
def plot_neural_network_architecture():
# Example model
model = Sequential([
Dense(64, activation='relu', input_shape=(784, )),
Dense(64, activation='relu'),
Dense(10, activation='softmax')
])
# Plot the model architecture
plot_model(model, to_file='neural_network_architecture.png', show_shapes=True, show_layer_names=True)
print("Neural network architecture diagram saved as 'neural_network_architecture.png'.")
# ---- Centroid Plot ----
def plot_centroid():
# Example data
X, _ = make_blobs(n_samples=300, centers=4, cluster_std=0.60, random_state=0)
# Fit KMeans
kmeans = KMeans(n_clusters=4)
kmeans.fit(X)
centroids = kmeans.cluster_centers_
labels = kmeans.labels_
# Plot
plt.figure(figsize=(8, 6))
plt.scatter(X[:, 0], X[:, 1], c=labels, s=50, cmap='viridis')
plt.scatter(centroids[:, 0], centroids[:, 1], c='red', s=200, alpha=0.75, marker='X')
plt.title('Centroid Plot')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.show()
# ---- Confusion Matrix Heatmap ----
def plot_confusion_matrix():
# Example data
y_true = [0, 1, 2, 2, 1, 0] # Replace with actual true labels
y_pred = [0, 0, 2, 2, 0, 0] # Replace with actual predicted labels
labels = ['class_0', 'class_1', 'class_2'] # Replace with your classes
# Compute confusion matrix
cm = confusion_matrix(y_true, y_pred)
# Plot
plt.figure(figsize=(10, 8))
sns.heatmap(cm, annot=True, fmt='d', xticklabels=labels, yticklabels=labels)
plt.title('Confusion Matrix')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()
# ---- Performance Comparison Bar Chart ----
def plot_performance_comparison():
# Example data
models = ['Neural Network', 'Random Forest']
accuracies = [0.6491, 0.5704] # Replace with actual accuracies
# Plot
plt.figure(figsize=(8, 6))
plt.bar(models, accuracies, color=['blue', 'orange'])
plt.title('Model Performance Comparison')
plt.xlabel('Model')
plt.ylabel('Accuracy')
plt.ylim(0, 1)
plt.show()
# ---- Learning Curve Plot ----
def plot_learning_curve():
# Example data
epochs = [1, 2, 3, 4, 5]
train_accuracy = [0.4580, 0.6725, 0.7086, 0.7294, 0.7522]
val_accuracy = [0.6767, 0.6824, 0.6831, 0.6618, 0.6448]
# Plot
plt.figure(figsize=(8, 6))
plt.plot(epochs, train_accuracy, 'bo-', label='Training Accuracy')
plt.plot(epochs, val_accuracy, 'ro-', label='Validation Accuracy')
plt.title('Learning Curve')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
# Main function to call all plotting functions
def main():
plot_neural_network_architecture()
plot_centroid()
plot_confusion_matrix()
plot_performance_comparison()
plot_learning_curve()
if __name__ == "__main__":
main()
/usr/local/lib/python3.10/dist-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead. super().__init__(activity_regularizer=activity_regularizer, **kwargs)
Neural network architecture diagram saved as 'neural_network_architecture.png'.
/usr/local/lib/python3.10/dist-packages/sklearn/cluster/_kmeans.py:1416: FutureWarning: The default value of `n_init` will change from 10 to 'auto' in 1.4. Set the value of `n_init` explicitly to suppress the warning super()._check_params_vs_input(X, default_n_init=10)
rework diagram codes execution
In [ ]:
import matplotlib.pyplot as plt
import pandas as pd
import matplotlib.dates as mdates
from datetime import datetime, timedelta
# Define the tasks and their start/end times
tasks = {
'Chapter 1: Introduction': ['2024-08-01', '2024-08-07'],
'Chapter 2: Literature Review': ['2024-08-08', '2024-08-14'],
'Chapter 3: Research Methodology': ['2024-08-15', '2024-08-21'],
'Chapter 4: Results and Analysis': ['2024-08-22', '2024-08-24'],
'Chapter 5: Discussion': ['2024-08-24', '2024-08-28'],
'Chapter 6: Conclusion and Recommendations': ['2024-08-28', '2024-09-01'],
'Revisions and Editing': ['2024-09-01', '2024-09-03'],
'Final Submission': ['2024-09-03', '2024-09-09']
}
# Convert tasks dictionary to a DataFrame
df = pd.DataFrame(tasks, index=['Start', 'End']).T
# Convert the start and end dates to datetime objects
df['Start'] = pd.to_datetime(df['Start'])
df['End'] = pd.to_datetime(df['End'])
# Calculate the duration of each task in days
df['Duration'] = (df['End'] - df['Start']).dt.days
# Create the figure and the subplot
fig, ax = plt.subplots(figsize=(10, 6))
# Create the Gantt chart
for i, (task, row) in enumerate(df.iterrows()):
ax.barh(task, row['Duration'], left=row['Start'], color='skyblue')
# Set the date format on the x-axis
ax.xaxis.set_major_locator(mdates.WeekdayLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
# Set labels and title
ax.set_xlabel('Date')
ax.set_ylabel('Tasks')
ax.set_title('Dissertation Plan Gantt Chart')
# Rotate date labels
plt.xticks(rotation=45)
# Show the plot
plt.tight_layout()
plt.show()
In [ ]:
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from math import pi
# Data for the table
data = {
'Method': ['Content-Based', 'Metadata Analysis', 'Network-Based', 'Machine Learning'],
'Description': [
'Analyzes text features like language and keywords.',
'Examines source, date, and author information.',
'Analyzes information spread through social networks.',
'Uses algorithms trained on data to classify news.'
],
'Strengths': [
'Identifies sensational language.',
'Verifies credibility of sources.',
'Reveals spread patterns and sources.',
'Handles large datasets and adapts to tactics.'
],
'Limitations': [
'Struggles with context and nuances.',
'Limited by data accuracy.',
'May miss some misinformation tactics.',
'Needs large datasets and can be biased.'
]
}
# Create a DataFrame
df = pd.DataFrame(data)
# Plotting setup
fig, axs = plt.subplots(2, 2, figsize=(14, 12))
# Table
axs[0, 0].axis('off')
table = axs[0, 0].table(cellText=df.values, colLabels=df.columns, loc='center', cellLoc='left', bbox=[0, 0, 1, 1])
table.auto_set_font_size(False)
table.set_fontsize(10)
# Stacked Bar Chart
methods = df['Method']
strengths = [3, 2, 4, 4] # Example values for strengths
limitations = [2, 3, 3, 3] # Example values for limitations
index = np.arange(len(methods))
bar_width = 0.5
bar1 = axs[0, 1].bar(index, strengths, bar_width, label='Strengths', color='green')
bar2 = axs[0, 1].bar(index, limitations, bar_width, bottom=strengths, label='Limitations', color='red')
axs[0, 1].set_xlabel('Detection Methods')
axs[0, 1].set_ylabel('Scores')
axs[0, 1].set_title('Comparison of Fake News Detection Methods')
axs[0, 1].set_xticks(index)
axs[0, 1].set_xticklabels(methods)
axs[0, 1].legend()
# Radar Chart
attributes = ['Ease of Use', 'Context Understanding', 'Scalability', 'Accuracy']
data_radar = {
'Content-Based': [3, 2, 2, 3],
'Metadata Analysis': [2, 3, 2, 2],
'Network-Based': [4, 3, 3, 4],
'Machine Learning': [4, 2, 4, 4]
}
num_vars = len(attributes)
angles = np.linspace(0, 2 * np.pi, num_vars, endpoint=False).tolist()
angles += angles[:1]
for method, values in data_radar.items():
values += values[:1]
axs[1, 0].plot(angles, values, label=method)
axs[1, 0].fill(angles, values, alpha=0.25)
axs[1, 0].set_yticks([])
axs[1, 0].set_xticks(angles[:-1])
axs[1, 0].set_xticklabels(attributes)
axs[1, 0].set_title('Fake News Detection Methods Comparison')
axs[1, 0].legend(loc='upper right', bbox_to_anchor=(1.2, 1.1))
# Adjust layout
plt.tight_layout()
# Show the plot
plt.show()
3.1.1.
In [ ]:
from graphviz import Digraph
# Create a new directed graph
dot = Digraph(comment='Current Research Project Design')
# Define the nodes
dot.node('A', 'Content-Based Methods\n(NLP Analysis)')
dot.node('B', 'Network-Based Methods\n(Propagation Analysis)')
dot.node('C', 'Geometric Deep Learning (GDL)')
dot.node('D', 'Graph Convolutional Networks (GCNs)')
dot.node('E', 'Integration & Analysis\n(Fake News Detection)')
# Define the edges
dot.edge('A', 'C', label='Provides Content Features')
dot.edge('B', 'C', label='Provides Network Insights')
dot.edge('C', 'D', label='Utilizes GCNs for Graph Data')
dot.edge('D', 'E', label='Delivers Detection Capabilities')
dot.edge('A', 'E', label='Informs Content Analysis')
dot.edge('B', 'E', label='Informs Propagation Dynamics')
# Render and save the diagram
dot.render('current_research_project_design', format='png', cleanup=True)
# If running in an environment that supports display, you can also use:
# dot.view()
Out[ ]:
'current_research_project_design.png'
In [ ]:
import matplotlib.pyplot as plt
import networkx as nx
# Create a figure with subplots
fig, axs = plt.subplots(2, 2, figsize=(15, 15))
# Create a directed graph
G = nx.DiGraph()
# Nodes and edges for Interaction Graphs
nodes_interaction = ["Twitter Dataset", "Interaction Graphs", "Individual Social Media Posts", "Retweets, Mentions, Likes, Replies"]
edges_interaction = [("Twitter Dataset", "Interaction Graphs"),
("Interaction Graphs", "Individual Social Media Posts"),
("Individual Social Media Posts", "Retweets, Mentions, Likes, Replies")]
# Add nodes and edges for Interaction Graphs
G.add_nodes_from(nodes_interaction)
G.add_edges_from(edges_interaction)
# Draw Interaction Graph
pos_interaction = nx.spring_layout(G, seed=42)
nx.draw_networkx_nodes(G, pos_interaction, node_size=3000, node_color='lightblue', ax=axs[0, 0])
nx.draw_networkx_edges(G, pos_interaction, edgelist=edges_interaction, arrowstyle='->', arrowsize=20, edge_color='gray', ax=axs[0, 0])
nx.draw_networkx_labels(G, pos_interaction, font_size=10, font_family='sans-serif', ax=axs[0, 0])
axs[0, 0].set_title("Interaction Graphs")
# Clear G for next graph
G.clear()
# Nodes and edges for Co-occurrence Graphs
nodes_cooccurrence = ["Twitter Dataset", "Co-occurrence Graphs", "Topics, Hashtags, Keywords", "Co-occurrence or Semantic Similarity"]
edges_cooccurrence = [("Twitter Dataset", "Co-occurrence Graphs"),
("Co-occurrence Graphs", "Topics, Hashtags, Keywords"),
("Topics, Hashtags, Keywords", "Co-occurrence or Semantic Similarity")]
# Add nodes and edges for Co-occurrence Graphs
G.add_nodes_from(nodes_cooccurrence)
G.add_edges_from(edges_cooccurrence)
# Draw Co-occurrence Graph
pos_cooccurrence = nx.spring_layout(G, seed=42)
nx.draw_networkx_nodes(G, pos_cooccurrence, node_size=3000, node_color='lightgreen', ax=axs[0, 1])
nx.draw_networkx_edges(G, pos_cooccurrence, edgelist=edges_cooccurrence, arrowstyle='->', arrowsize=20, edge_color='gray', ax=axs[0, 1])
nx.draw_networkx_labels(G, pos_cooccurrence, font_size=10, font_family='sans-serif', ax=axs[0, 1])
axs[0, 1].set_title("Co-occurrence Graphs")
# Clear G for next graph
G.clear()
# Nodes and edges for Content Graphs
nodes_content = ["Twitter Dataset", "Content Graphs", "Topics, Keywords, Hashtags", "Co-occurrence or Semantic Relationships"]
edges_content = [("Twitter Dataset", "Content Graphs"),
("Content Graphs", "Topics, Keywords, Hashtags"),
("Topics, Keywords, Hashtags", "Co-occurrence or Semantic Relationships")]
# Add nodes and edges for Content Graphs
G.add_nodes_from(nodes_content)
G.add_edges_from(edges_content)
# Draw Content Graph
pos_content = nx.spring_layout(G, seed=42)
nx.draw_networkx_nodes(G, pos_content, node_size=3000, node_color='lightcoral', ax=axs[1, 0])
nx.draw_networkx_edges(G, pos_content, edgelist=edges_content, arrowstyle='->', arrowsize=20, edge_color='gray', ax=axs[1, 0])
nx.draw_networkx_labels(G, pos_content, font_size=10, font_family='sans-serif', ax=axs[1, 0])
axs[1, 0].set_title("Content Graphs")
# Clear G for next graph
G.clear()
# Nodes and edges for User Influence Graphs
nodes_influence = ["Twitter Dataset", "User Influence Graphs", "Users", "Engagement Metrics"]
edges_influence = [("Twitter Dataset", "User Influence Graphs"),
("User Influence Graphs", "Users"),
("Users", "Engagement Metrics")]
# Add nodes and edges for User Influence Graphs
G.add_nodes_from(nodes_influence)
G.add_edges_from(edges_influence)
# Draw User Influence Graph
pos_influence = nx.spring_layout(G, seed=42)
nx.draw_networkx_nodes(G, pos_influence, node_size=3000, node_color='lightyellow', ax=axs[1, 1])
nx.draw_networkx_edges(G, pos_influence, edgelist=edges_influence, arrowstyle='->', arrowsize=20, edge_color='gray', ax=axs[1, 1])
nx.draw_networkx_labels(G, pos_influence, font_size=10, font_family='sans-serif', ax=axs[1, 1])
axs[1, 1].set_title("User Influence Graphs")
# Adjust layout and show plot
plt.tight_layout()
plt.show()
3.3 data preprocessing
columan names
In [ ]:
import pandas as pd
# Load the dataset
file_path = '/content/drive/MyDrive/Project/fake.csv'
df = pd.read_csv(file_path)
# Display the column names
print("Column names in the dataset:")
print(df.columns)
Column names in the dataset: Index(['title', 'text', 'subject', 'date'], dtype='object')
3.4
In [ ]:
import matplotlib.pyplot as plt
import networkx as nx
# Create a new graph
G = nx.Graph()
# Add nodes with labels
G.add_node("Twitter Dataset", pos=(0, 3))
G.add_node("Textual & Network Data", pos=(0, 2))
G.add_node("Graph Construction", pos=(-2, 1))
G.add_node("Graph Convolutional Networks (GCNs)", pos=(0, 0))
G.add_node("Node & Edge Representation", pos=(2, 1))
G.add_node("Local & Global Pattern Capture", pos=(-2, -1))
G.add_node("Enhanced Feature Learning", pos=(0, -1))
G.add_node("Robustness to Graph Variability", pos=(2, -1))
G.add_node("Scalability & Adaptability", pos=(0, -2))
G.add_node("Fake News Detection", pos=(0, -3))
# Add edges between nodes
G.add_edge("Twitter Dataset", "Textual & Network Data")
G.add_edge("Textual & Network Data", "Graph Construction")
G.add_edge("Textual & Network Data", "Node & Edge Representation")
G.add_edge("Graph Construction", "Graph Convolutional Networks (GCNs)")
G.add_edge("Node & Edge Representation", "Graph Convolutional Networks (GCNs)")
G.add_edge("Graph Convolutional Networks (GCNs)", "Local & Global Pattern Capture")
G.add_edge("Graph Convolutional Networks (GCNs)", "Enhanced Feature Learning")
G.add_edge("Graph Convolutional Networks (GCNs)", "Robustness to Graph Variability")
G.add_edge("Graph Convolutional Networks (GCNs)", "Scalability & Adaptability")
G.add_edge("Local & Global Pattern Capture", "Fake News Detection")
G.add_edge("Enhanced Feature Learning", "Fake News Detection")
G.add_edge("Robustness to Graph Variability", "Fake News Detection")
G.add_edge("Scalability & Adaptability", "Fake News Detection")
# Get node positions
pos = nx.get_node_attributes(G, 'pos')
# Draw the graph
plt.figure(figsize=(10, 8))
nx.draw(G, pos, with_labels=True, node_color='lightblue', node_size=3000, font_size=10, font_weight='bold', edge_color='gray')
plt.title('GCNs for Fake News Detection Architecture', fontsize=16)
plt.show()
3.6
In [ ]:
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
# Create the figure and axes
fig, ax = plt.subplots(figsize=(12, 8))
# Draw rectangles representing the different validation techniques
ax.add_patch(mpatches.Rectangle((0.1, 0.7), 0.3, 0.2, color='lightblue', ec='black'))
ax.add_patch(mpatches.Rectangle((0.6, 0.7), 0.3, 0.2, color='lightgreen', ec='black'))
ax.add_patch(mpatches.Rectangle((0.1, 0.4), 0.3, 0.2, color='lightcoral', ec='black'))
ax.add_patch(mpatches.Rectangle((0.6, 0.4), 0.3, 0.2, color='lightgoldenrodyellow', ec='black'))
# Draw rectangles representing the performance metrics
ax.add_patch(mpatches.Rectangle((0.35, 0.1), 0.3, 0.2, color='lightgrey', ec='black'))
# Add text labels
ax.text(0.25, 0.8, 'K-Fold Cross-Validation', fontsize=12, ha='center')
ax.text(0.75, 0.8, 'Stratified Sampling', fontsize=12, ha='center')
ax.text(0.25, 0.5, 'Holdout Validation', fontsize=12, ha='center')
ax.text(0.75, 0.5, 'Performance Metrics', fontsize=12, ha='center')
ax.text(0.5, 0.2, 'Precision, Recall, F1 Score,\nAUC-ROC, AUC-PR', fontsize=12, ha='center')
# Add arrows to show the flow of processes
ax.annotate('', xy=(0.35, 0.7), xytext=(0.5, 0.55), arrowprops=dict(facecolor='black', arrowstyle='->'))
ax.annotate('', xy=(0.65, 0.7), xytext=(0.5, 0.55), arrowprops=dict(facecolor='black', arrowstyle='->'))
ax.annotate('', xy=(0.35, 0.4), xytext=(0.5, 0.55), arrowprops=dict(facecolor='black', arrowstyle='->'))
ax.annotate('', xy=(0.65, 0.4), xytext=(0.5, 0.55), arrowprops=dict(facecolor='black', arrowstyle='->'))
# Set limits and remove axes for clarity
ax.set_xlim(0, 1)
ax.set_ylim(0, 1)
ax.axis('off')
# Display the diagram
plt.title('Validation Techniques and Performance Metrics for GCNs in Fake News Detection', fontsize=14)
plt.show()
4.4
In [ ]:
import matplotlib.pyplot as plt
import networkx as nx
# Create a directed graph
G = nx.DiGraph()
# Add nodes for models
G.add_node("Comparative Analysis", pos=(0, 0), size=1500)
G.add_node("Neural Network Model", pos=(-2, -2), size=2000)
G.add_node("Random Forest Classifier", pos=(2, -2), size=2000)
# Add nodes for sub-components of Neural Network Model
G.add_node("Deep Learning\nArchitecture", pos=(-4, -4), size=1500)
G.add_node("Complex Pattern\nLearning", pos=(-2, -6), size=1500)
G.add_node("High Generalization\nCapability", pos=(-4, -8), size=1500)
# Add nodes for sub-components of Random Forest Classifier
G.add_node("Ensemble Learning\nMethod", pos=(4, -4), size=1500)
G.add_node("Robustness and\nEase of Training", pos=(2, -6), size=1500)
G.add_node("Feature Engineering\nRequirement", pos=(4, -8), size=1500)
# Add edges between nodes
G.add_edges_from([
("Comparative Analysis", "Neural Network Model"),
("Comparative Analysis", "Random Forest Classifier"),
("Neural Network Model", "Deep Learning\nArchitecture"),
("Neural Network Model", "Complex Pattern\nLearning"),
("Neural Network Model", "High Generalization\nCapability"),
("Random Forest Classifier", "Ensemble Learning\nMethod"),
("Random Forest Classifier", "Robustness and\nEase of Training"),
("Random Forest Classifier", "Feature Engineering\nRequirement"),
])
# Draw the graph with custom positions
pos = nx.get_node_attributes(G, 'pos')
sizes = [G.nodes[node]['size'] for node in G.nodes]
nx.draw(G, pos, with_labels=True, node_size=sizes, node_color="lightblue", font_size=8, font_weight="bold", arrowsize=15)
# Show the diagram
plt.title("Comparative Analysis between Neural Network and Random Forest Classifier")
plt.show()
- discussion
In [ ]:
from graphviz import Digraph
# Create a new directed graph
dot = Digraph(comment='Chapter 5: Output Section')
# Set global graph attributes
dot.attr(rankdir='LR', fontsize='12')
# Define nodes with attributes
dot.node('KeyFindings', 'Key Findings', shape='box', style='filled', fillcolor='lightgreen')
dot.node('PerformanceMetrics', 'Performance Metrics', shape='box', style='filled', fillcolor='lightyellow')
dot.node('Implications', 'Implications', shape='box', style='filled', fillcolor='lightcoral')
dot.node('Recommendations', 'Recommendations', shape='box', style='filled', fillcolor='lightblue')
dot.node('FutureWork', 'Future Work', shape='box', style='filled', fillcolor='lightpink')
# Define edges with labels
dot.edge('KeyFindings', 'PerformanceMetrics', label='Inform')
dot.edge('KeyFindings', 'Implications', label='Lead to')
dot.edge('KeyFindings', 'Recommendations', label='Support')
dot.edge('KeyFindings', 'FutureWork', label='Suggests')
dot.edge('PerformanceMetrics', 'Implications', label='Determine')
dot.edge('PerformanceMetrics', 'Recommendations', label='Guide')
dot.edge('PerformanceMetrics', 'FutureWork', label='Influence')
dot.edge('Implications', 'Recommendations', label='Drive')
dot.edge('Implications', 'FutureWork', label='Identify')
dot.edge('Recommendations', 'FutureWork', label='Encourage')
# Render the graph to a file and display
dot.render('output_section_diagram', format='png', cleanup=True)
print("Diagram has been generated and saved as 'output_section_diagram.png'")
Diagram has been generated and saved as 'output_section_diagram.png'
6.1
In [ ]:
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
# Create a summary DataFrame
data = {
'Model': ['Neural Network', 'Random Forest'],
'Accuracy (%)': [64.91, 57.04],
'Precision': [0.85, 0.78], # Example values
'Recall': [0.83, 0.76], # Example values
'F1-Score': [0.84, 0.77] # Example values
}
df_summary = pd.DataFrame(data)
# Example Data for Comparative Metrics Plot
models = ['Neural Network', 'Random Forest']
metrics = ['Accuracy', 'Precision', 'Recall', 'F1-Score']
# Example performance values for the plot
values_nn = [64.91, 0.85, 0.83, 0.84]
values_rf = [57.04, 0.78, 0.76, 0.77]
# Creating a DataFrame for plotting
df_metrics = pd.DataFrame({
'Metric': metrics,
'Neural Network': values_nn,
'Random Forest': values_rf
})
# Example Data for Validation Techniques Plot
validation_techniques = ['k-fold Cross-Validation', 'Stratified Sampling', 'Holdout Validation']
performance = [90, 85, 80] # Example performance values
# Creating a DataFrame for plotting
df_validation = pd.DataFrame({
'Technique': validation_techniques,
'Performance (%)': performance
})
# Set up the figure and axes for multiple plots
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15, 12))
fig.suptitle('Summary of Model Performance and Validation Techniques', fontsize=16)
# Plot the summary table
ax1 = axes[0, 0]
ax1.axis('tight')
ax1.axis('off')
table_data = df_summary.values
table_columns = df_summary.columns
table = ax1.table(cellText=table_data, colLabels=table_columns, cellLoc='center', loc='center', bbox=[0, 0, 1, 1])
table.auto_set_font_size(False)
table.set_fontsize(10)
table.scale(1.2, 1.2)
ax1.set_title('Model Performance Summary')
# Plot the comparative metrics
ax2 = axes[0, 1]
df_metrics.set_index('Metric').plot(kind='bar', ax=ax2, color=['#1f77b4', '#ff7f0e'])
ax2.set_title('Comparative Metrics for Fake News Detection Models')
ax2.set_ylabel('Scores')
ax2.set_xticklabels(df_metrics['Metric'], rotation=45)
# Plot the validation techniques
ax3 = axes[1, 0]
sns.barplot(x='Technique', y='Performance (%)', data=df_validation, palette='viridis', ax=ax3)
ax3.set_title('Performance of Validation Techniques')
ax3.set_xlabel('Validation Technique')
ax3.set_ylabel('Performance (%)')
ax3.set_xticklabels(df_validation['Technique'], rotation=45)
# Plot placeholder (empty) for the fourth subplot if needed
ax4 = axes[1, 1]
ax4.axis('off') # This can be used for additional plots or information
# Adjust layout and show the plots
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.show()
<ipython-input-9-be5f702ada8a>:66: FutureWarning: Passing `palette` without assigning `hue` is deprecated and will be removed in v0.14.0. Assign the `x` variable to `hue` and set `legend=False` for the same effect. sns.barplot(x='Technique', y='Performance (%)', data=df_validation, palette='viridis', ax=ax3) <ipython-input-9-be5f702ada8a>:70: UserWarning: FixedFormatter should only be used together with FixedLocator ax3.set_xticklabels(df_validation['Technique'], rotation=45)
6.2
In [ ]:
import matplotlib.pyplot as plt
import pandas as pd
from pandas.plotting import table
import matplotlib.patches as patches
# Create a figure and axis for both the table and flowchart
fig, ax = plt.subplots(figsize=(14, 12))
# Table Data
data = {
'Recommendation': [
'Enhanced Model Architectures',
'Extended Dataset',
'Feature Engineering',
'Real-World Testing',
'Addressing Class Imbalance',
'Explainability and Interpretability'
],
'Details': [
'Explore hybrid models combining GCNs with Transformers for better detection.',
'Expand dataset to include diverse news sources and multilingual data.',
'Develop advanced feature extraction techniques including user behavior and network dynamics.',
'Implement models in live scenarios on social media platforms for practical insights.',
'Use techniques like SMOTE to handle class imbalances and improve detection across all classes.',
'Improve model transparency to build trust and facilitate better understanding of predictions.'
]
}
# Create DataFrame
df = pd.DataFrame(data)
# Plot table
ax_table = plt.subplot2grid((4, 1), (0, 0), rowspan=2)
ax_table.xaxis.set_visible(False) # hide the x axis
ax_table.yaxis.set_visible(False) # hide the y axis
ax_table.set_frame_on(False) # no visible frame
tabla = table(ax_table, df, loc='center', cellLoc='left', colWidths=[0.3, 0.6])
tabla.auto_set_font_size(False)
tabla.set_fontsize(10)
tabla.scale(1.2, 1.2) # Table size
ax_table.set_title('Recommendations for Future Research', fontsize=14)
# Flowchart Data
recommendations = [
('Enhanced Model Architectures', 'Explore hybrid models combining GCNs with Transformers.'),
('Extended Dataset', 'Expand dataset to include diverse news sources and multilingual data.'),
('Feature Engineering', 'Develop advanced feature extraction techniques including user behavior and network dynamics.'),
('Real-World Testing', 'Implement models in live scenarios on social media platforms for practical insights.'),
('Addressing Class Imbalance', 'Use techniques like SMOTE to handle class imbalances and improve detection across all classes.'),
('Explainability and Interpretability', 'Improve model transparency to build trust and facilitate better understanding of predictions.')
]
# Plot flowchart
ax_flowchart = plt.subplot2grid((4, 1), (2, 0), rowspan=2)
box_width = 0.6
box_height = 0.1
x_start = 0.1
y_start = 0.8
y_gap = 0.15
for i, (title, details) in enumerate(recommendations):
y_position = y_start - i * y_gap
rect = patches.FancyBboxPatch((x_start, y_position), box_width, box_height, boxstyle="round,pad=0.05", edgecolor='black', facecolor='lightgrey')
ax_flowchart.add_patch(rect)
ax_flowchart.text(x_start + 0.02, y_position + 0.05, title, fontsize=12, weight='bold')
ax_flowchart.text(x_start + 0.02, y_position - 0.02, details, fontsize=10, wrap=True)
# Set plot properties
ax_flowchart.set_xlim(0, 1)
ax_flowchart.set_ylim(0, 1)
ax_flowchart.set_aspect('equal')
ax_flowchart.axis('off')
ax_flowchart.set_title('Future Research Recommendations Flowchart', fontsize=14)
plt.tight_layout()
plt.show()
<ipython-input-10-920928d97ecc>:33: MatplotlibDeprecationWarning: Auto-removal of overlapping axes is deprecated since 3.6 and will be removed two minor releases later; explicitly call ax.remove() as needed. ax_table = plt.subplot2grid((4, 1), (0, 0), rowspan=2)
6.3
In [ ]:
import matplotlib.pyplot as plt
import seaborn as sns
import networkx as nx
# Ensure Seaborn is set up for a better visual style
sns.set(style="whitegrid")
# Create a figure with three subplots arranged in a 1x3 grid
fig, axs = plt.subplots(1, 3, figsize=(24, 8)) # Adjust figsize for better spacing
fig.suptitle('Model Evaluation and GCN Application', fontsize=16)
# 1. Bar Chart: Model Accuracy Comparison
models = ['Neural Network', 'Random Forest']
accuracy = [64.91, 57.04] # Example accuracy values
sns.barplot(x=models, y=accuracy, palette="viridis", ax=axs[0])
axs[0].set_title('Model Accuracy Comparison')
axs[0].set_xlabel('Model')
axs[0].set_ylabel('Accuracy (%)')
axs[0].set_ylim(0, 100)
# 2. Pie Chart: Model Strengths and Weaknesses
labels = ['Neural Network - High Accuracy', 'Random Forest - Robustness', 'Random Forest - Interpretability']
sizes = [60, 25, 15] # Example values for illustration
colors = ['#ff9999','#66b3ff','#99ff99']
explode = (0.1, 0, 0) # explode 1st slice
axs[1].pie(sizes, explode=explode, labels=labels, colors=colors, autopct='%1.1f%%', shadow=True, startangle=140)
axs[1].set_title('Model Strengths and Weaknesses')
# 3. Network Graph: Example of GCN Application
G = nx.Graph()
# Adding nodes and edges
G.add_nodes_from(['User A', 'User B', 'User C', 'Article 1', 'Article 2'])
G.add_edges_from([
('User A', 'Article 1'),
('User B', 'Article 1'),
('User C', 'Article 2'),
('User A', 'User B'),
('User B', 'User C')
])
pos = nx.spring_layout(G, seed=42) # Layout for better visualization
nx.draw(G, pos, with_labels=True, node_color='lightblue', edge_color='gray', node_size=2000, font_size=10, font_weight='bold', ax=axs[2])
axs[2].set_title('Network Graph for GCN Application')
# Adjust layout to prevent overlap
plt.tight_layout(rect=[0, 0, 1, 0.95]) # Leave space for suptitle
# Show the combined figure
plt.show()
<ipython-input-11-ecc11528cde3>:16: FutureWarning: Passing `palette` without assigning `hue` is deprecated and will be removed in v0.14.0. Assign the `x` variable to `hue` and set `legend=False` for the same effect. sns.barplot(x=models, y=accuracy, palette="viridis", ax=axs[0])
upload ipynb file to convert .html file
In [ ]:
from google.colab import files
import nbformat
from nbconvert import HTMLExporter
import os
def upload_notebook():
uploaded = files.upload()
# Save the uploaded file
for filename in uploaded.keys():
with open(filename, 'wb') as f:
f.write(uploaded[filename])
return filename # Return the filename of the uploaded notebook
def convert_notebook_to_html(notebook_path, output_path):
with open(notebook_path, 'r') as notebook_file:
notebook_content = notebook_file.read()
notebook_node = nbformat.reads(notebook_content, as_version=4)
html_exporter = HTMLExporter()
body, resources = html_exporter.from_notebook_node(notebook_node)
with open(output_path, 'w') as html_file:
html_file.write(body)
print(f"Notebook converted to HTML and saved as {output_path}")
# Main Execution
notebook_path = upload_notebook() # Upload the notebook
if notebook_path:
output_path = notebook_path.replace('.ipynb', '.html')
convert_notebook_to_html(notebook_path, output_path) # Convert to HTML
# Automatically download the HTML file
files.download(output_path)
else:
print("No file selected.")
Saving RK_Fakenews.ipynb to RK_Fakenews (1).ipynb Notebook converted to HTML and saved as RK_Fakenews (1).html
In [ ]:
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from datetime import datetime, timedelta
# Create a list of task names and their start/end dates
tasks = [
{"Task": "Chapter 1: Introduction", "Start": "2024-07-25", "End": "2024-07-31"},
{"Task": "Chapter 2: Literature Review", "Start": "2024-08-01", "End": "2024-08-07"},
{"Task": "Chapter 3: Methodology", "Start": "2024-08-08", "End": "2024-08-14"},
{"Task": "Chapter 4: Results", "Start": "2024-08-15", "End": "2024-08-21"},
{"Task": "Chapter 5: Discussion", "Start": "2024-08-22", "End": "2024-08-28"},
{"Task": "Chapter 6: Conclusion", "Start": "2024-08-29", "End": "2024-09-04"},
{"Task": "Finalizing & Appendices", "Start": "2024-09-05", "End": "2024-09-08"}
]
# Convert date strings to datetime objects
for task in tasks:
task['Start'] = datetime.strptime(task['Start'], "%Y-%m-%d")
task['End'] = datetime.strptime(task['End'], "%Y-%m-%d")
# Create figure and axis
fig, ax = plt.subplots(figsize=(12, 6))
# Create a color palette for the tasks
colors = plt.cm.tab20.colors
# Iterate over the tasks and add them to the Gantt chart
for i, task in enumerate(tasks):
ax.barh(task["Task"], (task["End"] - task["Start"]).days + 1, left=task["Start"], color=colors[i % len(colors)])
# Format the date axis
ax.xaxis.set_major_locator(mdates.WeekdayLocator())
ax.xaxis.set_major_formatter(mdates.DateFormatter('%b %d'))
plt.xticks(rotation=45)
# Set labels
ax.set_xlabel('Timeline')
ax.set_ylabel('Tasks')
ax.set_title('8-Week Dissertation Plan (July 25th, 2024 - September 8th, 2024)')
# Show plot
plt.tight_layout()
plt.show()